Refactor: delete unused crates (#2543)

* refactor: delete user model

* refactor: delete user model crate

* refactor: rm flowy-server-sync crate

* refactor: rm flowy-database and flowy-folder

* refactor: rm folder-model

* refactor: rm database model

* refactor: rm flowy-sync

* refactor: rm document-model

* refactor: rm flowy-document

* refactor: rm flowy-client-sync

* refactor: rm ws-model

* refactor: rm flowy-revision

* refactor: rm revision-model

* refactor: rm flowy-folder

* refactor: rm flowy-client-ws

* refactor: move crates

* chore: move configuration file

* ci: fix tauri build

* ci: fix flutter build

* ci: rust test script

* ci: tauri pnpm version conflict

* ci: tauri build
Nathan.fooo 2023-05-17 09:49:39 +08:00 committed by GitHub
parent 2202326278
commit bc66f43f47
514 changed files with 2274 additions and 55304 deletions

View File

@ -73,7 +73,7 @@ jobs:
- name: Run rust-lib tests
working-directory: frontend/rust-lib
run: RUST_LOG=info cargo test --no-default-features --features="sync,rev-sqlite"
run: RUST_LOG=info cargo test --no-default-features --features="rev-sqlite"
- name: rustfmt shared-lib
run: cargo fmt --all -- --check

View File

@ -4,7 +4,7 @@ import 'package:appflowy/plugins/document/application/share_service.dart';
import 'package:appflowy/plugins/document/presentation/editor_plugins/parsers/divider_node_parser.dart';
import 'package:appflowy/plugins/document/presentation/editor_plugins/parsers/math_equation_node_parser.dart';
import 'package:appflowy/plugins/document/presentation/editor_plugins/parsers/code_block_node_parser.dart';
import 'package:appflowy_backend/protobuf/flowy-document/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-document2/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-folder2/view.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:freezed_annotation/freezed_annotation.dart';

View File

@ -1,8 +1,7 @@
import 'dart:async';
import 'package:appflowy_backend/protobuf/flowy-document2/entities.pb.dart';
import 'package:dartz/dartz.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-document/protobuf.dart';
import 'package:appflowy_backend/protobuf/flowy-folder2/view.pb.dart';
class ShareService {
@ -10,12 +9,13 @@ class ShareService {
ViewPB view,
ExportType type,
) {
var payload = ExportPayloadPB.create()
..viewId = view.id
..exportType = type
..documentVersion = DocumentVersionPB.V1;
// var payload = ExportPayloadPB.create()
// ..viewId = view.id
// ..exportType = type
// ..documentVersion = DocumentVersionPB.V1;
return DocumentEventExportDocument(payload).send();
// return DocumentEventExportDocument(payload).send();
throw UnimplementedError();
}
Future<Either<ExportDataPB, FlowyError>> exportText(ViewPB view) {

View File

@ -6,6 +6,7 @@ import 'package:appflowy/plugins/document/application/share_bloc.dart';
import 'package:appflowy/workspace/presentation/home/toast.dart';
import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
import 'package:appflowy/workspace/presentation/widgets/pop_up_action.dart';
import 'package:appflowy_backend/protobuf/flowy-document2/entities.pb.dart';
import 'package:appflowy_popover/appflowy_popover.dart';
import 'package:clipboard/clipboard.dart';
import 'package:easy_localization/easy_localization.dart';
@ -14,7 +15,6 @@ import 'package:flowy_infra_ui/widget/rounded_button.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-folder2/view.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-document/entities.pb.dart';
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';

View File

@ -16,7 +16,6 @@ import 'package:appflowy_backend/ffi.dart' as ffi;
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:appflowy_backend/protobuf/dart-ffi/protobuf.dart';
import 'package:appflowy_backend/protobuf/flowy-folder2/protobuf.dart';
import 'package:appflowy_backend/protobuf/flowy-document/protobuf.dart';
import 'package:appflowy_backend/protobuf/flowy-database2/protobuf.dart';
import 'package:appflowy_backend/protobuf/flowy-document2/protobuf.dart';
@ -30,7 +29,6 @@ part 'dart_event/flowy-folder2/dart_event.dart';
part 'dart_event/flowy-net/dart_event.dart';
part 'dart_event/flowy-user/dart_event.dart';
part 'dart_event/flowy-database2/dart_event.dart';
part 'dart_event/flowy-document/dart_event.dart';
part 'dart_event/flowy-document2/dart_event.dart';
enum FFIException {

File diff suppressed because it is too large

View File

@ -117,6 +117,7 @@ version = "0.0.0"
dependencies = [
"bytes",
"flowy-core",
"flowy-net",
"flowy-notification",
"lib-dispatch",
"serde",
@ -1477,18 +1478,6 @@ dependencies = [
"parking_lot_core 0.9.7",
]
[[package]]
name = "database-model"
version = "0.1.0"
dependencies = [
"bytes",
"indexmap",
"nanoid",
"serde",
"serde_json",
"serde_repr",
]
[[package]]
name = "derivative"
version = "2.2.0"
@ -1598,21 +1587,6 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b"
[[package]]
name = "dissimilar"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "210ec60ae7d710bed8683e333e9d2855a8a56a3e9892b38bad3bb0d4d29b0d5e"
[[package]]
name = "document-model"
version = "0.1.0"
dependencies = [
"revision-model",
"serde",
"serde_json",
]
[[package]]
name = "dtoa"
version = "0.4.8"
@ -1787,56 +1761,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "flowy-client-network-config"
version = "0.1.0"
dependencies = [
"config",
"serde",
"serde-aux",
"serde_json",
]
[[package]]
name = "flowy-client-sync"
version = "0.1.0"
dependencies = [
"bytes",
"chrono",
"database-model",
"dissimilar",
"document-model",
"flowy-derive",
"flowy-sync",
"folder-model",
"lib-infra",
"lib-ot",
"parking_lot 0.12.1",
"revision-model",
"serde",
"serde_json",
"strum",
"strum_macros",
"tokio",
"tracing",
"url",
]
[[package]]
name = "flowy-client-ws"
version = "0.1.0"
dependencies = [
"futures-util",
"lib-infra",
"lib-ws",
"parking_lot 0.12.1",
"serde",
"serde_repr",
"thiserror",
"tokio",
"tracing",
]
[[package]]
name = "flowy-codegen"
version = "0.1.0"
@ -1867,15 +1791,11 @@ version = "0.1.0"
dependencies = [
"appflowy-integrate",
"bytes",
"database-model",
"flowy-client-ws",
"flowy-database2",
"flowy-document",
"flowy-document2",
"flowy-error",
"flowy-folder2",
"flowy-net",
"flowy-revision",
"flowy-sqlite",
"flowy-task",
"flowy-user",
@ -1885,13 +1805,10 @@ dependencies = [
"lib-log",
"lib-ws",
"parking_lot 0.12.1",
"revision-model",
"serde",
"serde_json",
"tokio",
"tracing",
"user-model",
"ws-model",
]
[[package]]
@ -1908,7 +1825,6 @@ dependencies = [
"collab",
"collab-database",
"dashmap",
"database-model",
"fancy-regex 0.10.0",
"flowy-codegen",
"flowy-derive",
@ -1951,44 +1867,6 @@ dependencies = [
"walkdir",
]
[[package]]
name = "flowy-document"
version = "0.1.0"
dependencies = [
"async-stream",
"bytes",
"chrono",
"dashmap",
"diesel",
"diesel_derives",
"document-model",
"flowy-client-sync",
"flowy-codegen",
"flowy-derive",
"flowy-error",
"flowy-notification",
"flowy-revision",
"flowy-revision-persistence",
"flowy-sqlite",
"futures",
"futures-util",
"lib-dispatch",
"lib-infra",
"lib-ot",
"lib-ws",
"md5",
"protobuf",
"revision-model",
"serde",
"serde_json",
"strum",
"strum_macros",
"tokio",
"tracing",
"url",
"ws-model",
]
[[package]]
name = "flowy-document2"
version = "0.1.0"
@ -2021,14 +1899,11 @@ dependencies = [
"bytes",
"collab-database",
"collab-document",
"flowy-client-sync",
"flowy-client-ws",
"flowy-codegen",
"flowy-derive",
"flowy-sqlite",
"http-error-code",
"lib-dispatch",
"lib-ot",
"protobuf",
"r2d2",
"reqwest",
@ -2036,7 +1911,6 @@ dependencies = [
"serde_json",
"serde_repr",
"thiserror",
"user-model",
]
[[package]]
@ -2050,7 +1924,6 @@ dependencies = [
"collab-folder",
"flowy-codegen",
"flowy-derive",
"flowy-document",
"flowy-error",
"flowy-notification",
"lazy_static",
@ -2075,20 +1948,12 @@ dependencies = [
"bytes",
"config",
"dashmap",
"document-model",
"flowy-client-network-config",
"flowy-client-sync",
"flowy-client-ws",
"flowy-codegen",
"flowy-derive",
"flowy-document",
"flowy-document2",
"flowy-error",
"flowy-folder2",
"flowy-server-sync",
"flowy-sync",
"flowy-user",
"folder-model",
"futures-util",
"hyper",
"lazy_static",
@ -2099,7 +1964,6 @@ dependencies = [
"parking_lot 0.12.1",
"protobuf",
"reqwest",
"revision-model",
"serde",
"serde-aux",
"serde_json",
@ -2108,8 +1972,6 @@ dependencies = [
"thiserror",
"tokio",
"tracing",
"user-model",
"ws-model",
]
[[package]]
@ -2126,58 +1988,6 @@ dependencies = [
"tracing",
]
[[package]]
name = "flowy-revision"
version = "0.1.0"
dependencies = [
"async-stream",
"bytes",
"dashmap",
"flowy-error",
"flowy-revision-persistence",
"futures",
"futures-util",
"lib-infra",
"lib-ws",
"revision-model",
"serde",
"serde_json",
"strum",
"strum_macros",
"tokio",
"tracing",
"ws-model",
]
[[package]]
name = "flowy-revision-persistence"
version = "0.1.0"
dependencies = [
"flowy-error",
"revision-model",
]
[[package]]
name = "flowy-server-sync"
version = "0.1.0"
dependencies = [
"async-stream",
"bytes",
"dashmap",
"document-model",
"flowy-sync",
"folder-model",
"futures",
"lib-infra",
"lib-ot",
"log",
"revision-model",
"serde",
"tokio",
"tracing",
"ws-model",
]
[[package]]
name = "flowy-sqlite"
version = "0.1.0"
@ -2193,24 +2003,6 @@ dependencies = [
"tracing",
]
[[package]]
name = "flowy-sync"
version = "0.1.0"
dependencies = [
"document-model",
"folder-model",
"lib-infra",
"lib-ot",
"parking_lot 0.12.1",
"revision-model",
"serde",
"strum",
"strum_macros",
"tokio",
"tracing",
"ws-model",
]
[[package]]
name = "flowy-task"
version = "0.1.0"
@ -2230,6 +2022,7 @@ dependencies = [
"bytes",
"diesel",
"diesel_derives",
"fancy-regex 0.11.0",
"flowy-codegen",
"flowy-derive",
"flowy-error",
@ -2248,7 +2041,8 @@ dependencies = [
"strum_macros",
"tokio",
"tracing",
"user-model",
"unicode-segmentation",
"validator",
]
[[package]]
@ -2257,16 +2051,6 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "folder-model"
version = "0.1.0"
dependencies = [
"chrono",
"nanoid",
"serde",
"serde_repr",
]
[[package]]
name = "foreign-types"
version = "0.3.2"
@ -3025,12 +2809,6 @@ dependencies = [
"serde",
]
[[package]]
name = "indextree"
version = "4.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c40411d0e5c63ef1323c3d09ce5ec6d84d71531e18daed0743fccea279d7deb6"
[[package]]
name = "infer"
version = "0.7.0"
@ -3251,23 +3029,6 @@ dependencies = [
"tracing-subscriber 0.2.25",
]
[[package]]
name = "lib-ot"
version = "0.1.0"
dependencies = [
"bytes",
"indexmap",
"indextree",
"lazy_static",
"log",
"serde",
"serde_json",
"strum",
"strum_macros",
"thiserror",
"tracing",
]
[[package]]
name = "lib-ws"
version = "0.1.0"
@ -4648,16 +4409,6 @@ dependencies = [
"winreg",
]
[[package]]
name = "revision-model"
version = "0.1.0"
dependencies = [
"bytes",
"md5",
"serde",
"serde_json",
]
[[package]]
name = "ring"
version = "0.16.20"
@ -6279,20 +6030,6 @@ version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9"
[[package]]
name = "user-model"
version = "0.1.0"
dependencies = [
"fancy-regex 0.11.0",
"lazy_static",
"serde",
"serde_repr",
"thiserror",
"tracing",
"unicode-segmentation",
"validator",
]
[[package]]
name = "utf-8"
version = "0.7.6"
@ -6913,17 +6650,6 @@ dependencies = [
"windows-implement",
]
[[package]]
name = "ws-model"
version = "0.1.0"
dependencies = [
"bytes",
"revision-model",
"serde",
"serde_json",
"serde_repr",
]
[[package]]
name = "x11"
version = "2.21.0"

View File

@ -23,6 +23,7 @@ tracing = { version = "0.1", features = ["log"] }
lib-dispatch = { path = "../../rust-lib/lib-dispatch", features = ["use_serde"] }
flowy-core = { path = "../../rust-lib/flowy-core", features = ["rev-sqlite", "ts"] }
flowy-notification = { path = "../../rust-lib/flowy-notification", features = ["ts"] }
flowy-net = { path = "../../rust-lib/flowy-net" }
[features]
# by default Tauri runs in production mode

View File

@ -1,4 +1,5 @@
use flowy_core::{get_client_server_configuration, AppFlowyCore, AppFlowyCoreConfig, DEFAULT_NAME};
use flowy_core::{ AppFlowyCore, AppFlowyCoreConfig, DEFAULT_NAME};
use flowy_net::http_server::self_host::configuration::get_client_server_configuration;
pub fn init_flowy_core() -> AppFlowyCore {
let config_json = include_str!("../tauri.conf.json");
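
For context, this hunk relocates get_client_server_configuration from a flowy-core re-export to flowy-net's self-host configuration module. A minimal sketch of the resulting wiring follows; the AppFlowyCoreConfig::new argument list shown here is an assumption for illustration, not the verified constructor signature:

use flowy_core::{AppFlowyCore, AppFlowyCoreConfig, DEFAULT_NAME};
use flowy_net::http_server::self_host::configuration::get_client_server_configuration;

// Sketch only: demonstrates the relocated import, not the exact tauri glue.
fn init_core(data_path: &str) -> AppFlowyCore {
  // The helper now lives in flowy-net; flowy-core no longer re-exports it.
  let server_config = get_client_server_configuration()
    .expect("failed to read the client/server configuration");
  // Hypothetical argument order; see flowy-core for the real constructor.
  let config = AppFlowyCoreConfig::new(data_path, DEFAULT_NAME.to_string(), server_config);
  AppFlowyCore::new(config)
}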

View File

@ -1,5 +1,4 @@
export * from "./models/flowy-user";
export * from "./models/flowy-document";
export * from "./models/flowy-database2";
export * from "./models/flowy-folder2";
export * from "./models/flowy-document2";

View File

@ -157,17 +157,6 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "857253367827bd9d0fd973f0ef15506a96e79e41b0ad7aa691203a4e3214f6c8"
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi 0.1.19",
"libc",
"winapi",
]
[[package]]
name = "autocfg"
version = "1.1.0"
@ -574,15 +563,6 @@ dependencies = [
"vsimd",
]
[[package]]
name = "basic-toml"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c0de75129aa8d0cceaf750b89013f0e08804d6ec61416da787b35ad0d7cddf1"
dependencies = [
"serde",
]
[[package]]
name = "bincode"
version = "1.3.3"
@ -770,12 +750,6 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "cast"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cc"
version = "1.0.79"
@ -871,17 +845,6 @@ dependencies = [
"libloading",
]
[[package]]
name = "clap"
version = "2.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
dependencies = [
"bitflags",
"textwrap",
"unicode-width",
]
[[package]]
name = "cmd_lib"
version = "1.3.0"
@ -1091,19 +1054,6 @@ dependencies = [
"yrs",
]
[[package]]
name = "color-eyre"
version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f1885697ee8a177096d42f158922251a41973117f6d8a234cee94b9509157b7"
dependencies = [
"backtrace",
"eyre",
"indenter",
"once_cell",
"owo-colors",
]
[[package]]
name = "config"
version = "0.10.1"
@ -1167,12 +1117,6 @@ dependencies = [
"tracing-subscriber 0.3.16",
]
[[package]]
name = "convert_case"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
name = "core-foundation"
version = "0.9.3"
@ -1207,42 +1151,6 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "criterion"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f"
dependencies = [
"atty",
"cast",
"clap",
"criterion-plot",
"csv",
"itertools",
"lazy_static",
"num-traits",
"oorandom",
"plotters",
"rayon",
"regex",
"serde",
"serde_cbor",
"serde_derive",
"serde_json",
"tinytemplate",
"walkdir",
]
[[package]]
name = "criterion-plot"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876"
dependencies = [
"cast",
"itertools",
]
[[package]]
name = "crossbeam-channel"
version = "0.5.8"
@ -1296,27 +1204,6 @@ dependencies = [
"typenum",
]
[[package]]
name = "csv"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b015497079b9a9d69c02ad25de6c0a6edef051ea6360a327d0bd05802ef64ad"
dependencies = [
"csv-core",
"itoa",
"ryu",
"serde",
]
[[package]]
name = "csv-core"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90"
dependencies = [
"memchr",
]
[[package]]
name = "cxx"
version = "1.0.94"
@ -1372,6 +1259,7 @@ dependencies = [
"flowy-codegen",
"flowy-core",
"flowy-derive",
"flowy-net",
"flowy-notification",
"lazy_static",
"lib-dispatch",
@ -1397,18 +1285,6 @@ dependencies = [
"parking_lot_core 0.9.7",
]
[[package]]
name = "database-model"
version = "0.1.0"
dependencies = [
"bytes",
"indexmap",
"nanoid",
"serde",
"serde_json",
"serde_repr",
]
[[package]]
name = "derivative"
version = "2.2.0"
@ -1420,19 +1296,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "derive_more"
version = "0.99.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
"convert_case",
"proc-macro2",
"quote",
"rustc_version",
"syn 1.0.109",
]
[[package]]
name = "deunicode"
version = "0.4.3"
@ -1512,21 +1375,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "dissimilar"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "210ec60ae7d710bed8683e333e9d2855a8a56a3e9892b38bad3bb0d4d29b0d5e"
[[package]]
name = "document-model"
version = "0.1.0"
dependencies = [
"revision-model",
"serde",
"serde_json",
]
[[package]]
name = "dyn-clone"
version = "1.0.11"
@ -1594,16 +1442,6 @@ dependencies = [
"backtrace",
]
[[package]]
name = "eyre"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb"
dependencies = [
"indenter",
"once_cell",
]
[[package]]
name = "faccess"
version = "0.2.4"
@ -1672,56 +1510,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "flowy-client-network-config"
version = "0.1.0"
dependencies = [
"config",
"serde",
"serde-aux",
"serde_json",
]
[[package]]
name = "flowy-client-sync"
version = "0.1.0"
dependencies = [
"bytes",
"chrono",
"database-model",
"dissimilar",
"document-model",
"flowy-derive",
"flowy-sync",
"folder-model",
"lib-infra",
"lib-ot",
"parking_lot 0.12.1",
"revision-model",
"serde",
"serde_json",
"strum",
"strum_macros",
"tokio",
"tracing",
"url",
]
[[package]]
name = "flowy-client-ws"
version = "0.1.0"
dependencies = [
"futures-util",
"lib-infra",
"lib-ws",
"parking_lot 0.12.1",
"serde",
"serde_repr",
"thiserror",
"tokio",
"tracing",
]
[[package]]
name = "flowy-codegen"
version = "0.1.0"
@ -1753,15 +1541,11 @@ dependencies = [
"appflowy-integrate",
"bytes",
"console-subscriber",
"database-model",
"flowy-client-ws",
"flowy-database2",
"flowy-document",
"flowy-document2",
"flowy-error",
"flowy-folder2",
"flowy-net",
"flowy-revision",
"flowy-sqlite",
"flowy-task",
"flowy-user",
@ -1771,13 +1555,10 @@ dependencies = [
"lib-log",
"lib-ws",
"parking_lot 0.12.1",
"revision-model",
"serde",
"serde_json",
"tokio",
"tracing",
"user-model",
"ws-model",
]
[[package]]
@ -1794,7 +1575,6 @@ dependencies = [
"collab",
"collab-database",
"dashmap",
"database-model",
"fancy-regex 0.10.0",
"flowy-codegen",
"flowy-derive",
@ -1831,62 +1611,13 @@ dependencies = [
"flowy-ast",
"flowy-codegen",
"lazy_static",
"log",
"proc-macro2",
"quote",
"serde_json",
"syn 1.0.109",
"tokio",
"trybuild",
"walkdir",
]
[[package]]
name = "flowy-document"
version = "0.1.0"
dependencies = [
"async-stream",
"bytes",
"chrono",
"color-eyre",
"criterion",
"dashmap",
"derive_more",
"diesel",
"diesel_derives",
"document-model",
"flowy-client-sync",
"flowy-codegen",
"flowy-derive",
"flowy-document",
"flowy-error",
"flowy-notification",
"flowy-revision",
"flowy-revision-persistence",
"flowy-sqlite",
"flowy-test",
"futures",
"futures-util",
"lib-dispatch",
"lib-infra",
"lib-ot",
"lib-ws",
"md5",
"protobuf",
"rand 0.8.5",
"revision-model",
"serde",
"serde_json",
"strum",
"strum_macros",
"tokio",
"tracing",
"tracing-subscriber 0.2.25",
"unicode-segmentation",
"url",
"ws-model",
]
[[package]]
name = "flowy-document2"
version = "0.1.0"
@ -1921,14 +1652,11 @@ dependencies = [
"bytes",
"collab-database",
"collab-document",
"flowy-client-sync",
"flowy-client-ws",
"flowy-codegen",
"flowy-derive",
"flowy-sqlite",
"http-error-code",
"lib-dispatch",
"lib-ot",
"protobuf",
"r2d2",
"reqwest",
@ -1936,7 +1664,6 @@ dependencies = [
"serde_json",
"serde_repr",
"thiserror",
"user-model",
]
[[package]]
@ -1950,7 +1677,6 @@ dependencies = [
"collab-folder",
"flowy-codegen",
"flowy-derive",
"flowy-document",
"flowy-error",
"flowy-folder2",
"flowy-notification",
@ -1977,20 +1703,12 @@ dependencies = [
"bytes",
"config",
"dashmap",
"document-model",
"flowy-client-network-config",
"flowy-client-sync",
"flowy-client-ws",
"flowy-codegen",
"flowy-derive",
"flowy-document",
"flowy-document2",
"flowy-error",
"flowy-folder2",
"flowy-server-sync",
"flowy-sync",
"flowy-user",
"folder-model",
"futures-util",
"hyper",
"lazy_static",
@ -2001,7 +1719,6 @@ dependencies = [
"parking_lot 0.12.1",
"protobuf",
"reqwest",
"revision-model",
"serde",
"serde-aux",
"serde_json",
@ -2010,8 +1727,6 @@ dependencies = [
"thiserror",
"tokio",
"tracing",
"user-model",
"ws-model",
]
[[package]]
@ -2028,61 +1743,6 @@ dependencies = [
"tracing",
]
[[package]]
name = "flowy-revision"
version = "0.1.0"
dependencies = [
"async-stream",
"bytes",
"dashmap",
"flowy-error",
"flowy-revision",
"flowy-revision-persistence",
"futures",
"futures-util",
"lib-infra",
"lib-ws",
"nanoid",
"parking_lot 0.12.1",
"revision-model",
"serde",
"serde_json",
"strum",
"strum_macros",
"tokio",
"tracing",
"ws-model",
]
[[package]]
name = "flowy-revision-persistence"
version = "0.1.0"
dependencies = [
"flowy-error",
"revision-model",
]
[[package]]
name = "flowy-server-sync"
version = "0.1.0"
dependencies = [
"async-stream",
"bytes",
"dashmap",
"document-model",
"flowy-sync",
"folder-model",
"futures",
"lib-infra",
"lib-ot",
"log",
"revision-model",
"serde",
"tokio",
"tracing",
"ws-model",
]
[[package]]
name = "flowy-sqlite"
version = "0.1.0"
@ -2100,24 +1760,6 @@ dependencies = [
"tracing",
]
[[package]]
name = "flowy-sync"
version = "0.1.0"
dependencies = [
"document-model",
"folder-model",
"lib-infra",
"lib-ot",
"parking_lot 0.12.1",
"revision-model",
"serde",
"strum",
"strum_macros",
"tokio",
"tracing",
"ws-model",
]
[[package]]
name = "flowy-task"
version = "0.1.0"
@ -2137,9 +1779,7 @@ version = "0.1.0"
dependencies = [
"bytes",
"fake",
"flowy-client-sync",
"flowy-core",
"flowy-document",
"flowy-folder2",
"flowy-net",
"flowy-user",
@ -2152,7 +1792,7 @@ dependencies = [
"nanoid",
"protobuf",
"quickcheck",
"quickcheck_macros",
"quickcheck_macros 0.9.1",
"serde",
"serde_json",
"serial_test",
@ -2168,6 +1808,8 @@ dependencies = [
"bytes",
"diesel",
"diesel_derives",
"fake",
"fancy-regex 0.11.0",
"flowy-codegen",
"flowy-derive",
"flowy-error",
@ -2182,13 +1824,18 @@ dependencies = [
"once_cell",
"parking_lot 0.12.1",
"protobuf",
"quickcheck",
"quickcheck_macros 1.0.0",
"rand 0.8.5",
"rand_core 0.6.4",
"serde",
"serde_json",
"strum",
"strum_macros",
"tokio",
"tracing",
"user-model",
"unicode-segmentation",
"validator",
]
[[package]]
@ -2197,16 +1844,6 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "folder-model"
version = "0.1.0"
dependencies = [
"chrono",
"nanoid",
"serde",
"serde_repr",
]
[[package]]
name = "foreign-types"
version = "0.3.2"
@ -2438,12 +2075,6 @@ dependencies = [
"tracing",
]
[[package]]
name = "half"
version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7"
[[package]]
name = "hashbrown"
version = "0.12.3"
@ -2484,15 +2115,6 @@ dependencies = [
"unicode-segmentation",
]
[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
[[package]]
name = "hermit-abi"
version = "0.2.6"
@ -2708,12 +2330,6 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "indenter"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "indexmap"
version = "1.9.3"
@ -3238,12 +2854,6 @@ version = "1.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
[[package]]
name = "oorandom"
version = "11.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
[[package]]
name = "opaque-debug"
version = "0.3.0"
@ -3326,12 +2936,6 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "owo-colors"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2386b4ebe91c2f7f51082d4cefa145d030e33a1842a96b12e4885cc3c01f7a55"
[[package]]
name = "parking_lot"
version = "0.11.2"
@ -3604,34 +3208,6 @@ version = "0.3.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
[[package]]
name = "plotters"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97"
dependencies = [
"num-traits",
"plotters-backend",
"plotters-svg",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "plotters-backend"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142"
[[package]]
name = "plotters-svg"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f"
dependencies = [
"plotters-backend",
]
[[package]]
name = "ppv-lite86"
version = "0.2.17"
@ -3847,6 +3423,17 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "quickcheck_macros"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b22a693222d716a9587786f37ac3f6b4faedb5b80c23914e7303ff5a1d8016e9"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "quote"
version = "1.0.26"
@ -4077,16 +3664,6 @@ dependencies = [
"winreg",
]
[[package]]
name = "revision-model"
version = "0.1.0"
dependencies = [
"bytes",
"md5",
"serde",
"serde_json",
]
[[package]]
name = "ring"
version = "0.16.20"
@ -4377,16 +3954,6 @@ dependencies = [
"serde_json",
]
[[package]]
name = "serde_cbor"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5"
dependencies = [
"half",
"serde",
]
[[package]]
name = "serde_derive"
version = "1.0.160"
@ -4709,15 +4276,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "textwrap"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
dependencies = [
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.40"
@ -4795,16 +4353,6 @@ dependencies = [
"time-core",
]
[[package]]
name = "tinytemplate"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
dependencies = [
"serde",
"serde_json",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
@ -5156,21 +4704,6 @@ version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"
[[package]]
name = "trybuild"
version = "1.0.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "501dbdbb99861e4ab6b60eb6a7493956a9defb644fd034bc4a5ef27c693c8a3a"
dependencies = [
"basic-toml",
"glob",
"once_cell",
"serde",
"serde_derive",
"serde_json",
"termcolor",
]
[[package]]
name = "tungstenite"
version = "0.14.0"
@ -5336,20 +4869,6 @@ version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9"
[[package]]
name = "user-model"
version = "0.1.0"
dependencies = [
"fancy-regex 0.11.0",
"lazy_static",
"serde",
"serde_repr",
"thiserror",
"tracing",
"unicode-segmentation",
"validator",
]
[[package]]
name = "utf-8"
version = "0.7.6"
@ -5726,17 +5245,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "ws-model"
version = "0.1.0"
dependencies = [
"bytes",
"revision-model",
"serde",
"serde_json",
"serde_repr",
]
[[package]]
name = "xmlparser"
version = "0.13.5"

View File

@ -8,21 +8,12 @@ members = [
"flowy-user",
"flowy-test",
"flowy-sqlite",
# "flowy-folder",r
"flowy-folder2",
"flowy-notification",
"flowy-document2",
"flowy-document",
"flowy-error",
"flowy-revision",
"flowy-revision-persistence",
# "flowy-database",
"flowy-database2",
"flowy-task",
"flowy-client-sync",
"flowy-derive",
"flowy-ast",
"flowy-codegen",
]
[profile.dev]

View File

@ -29,7 +29,8 @@ tracing = { version = "0.1", features = ["log"] }
lib-dispatch = { path = "../lib-dispatch" }
flowy-core = { path = "../flowy-core" }
flowy-notification = { path = "../flowy-notification" }
flowy-derive = { path = "../flowy-derive" }
flowy-net = { path = "../flowy-net" }
flowy-derive = { path = "../../../shared-lib/flowy-derive" }
[features]
default = ["dart", "rev-sqlite"]
@ -39,4 +40,4 @@ http_sync = ["flowy-core/http_sync", "flowy-core/use_bunyan"]
openssl_vendored = ["flowy-core/openssl_vendored"]
[build-dependencies]
flowy-codegen = { path = "../flowy-codegen", features = ["dart"] }
flowy-codegen = { path = "../../../shared-lib/flowy-codegen", features = ["dart"] }

View File

@ -1,23 +1,27 @@
#![allow(clippy::not_unsafe_ptr_arg_deref)]
mod c;
mod model;
mod notification;
mod protobuf;
mod util;
use std::{ffi::CStr, os::raw::c_char};
use lazy_static::lazy_static;
use parking_lot::RwLock;
use flowy_core::*;
use flowy_net::http_server::self_host::configuration::get_client_server_configuration;
use flowy_notification::register_notification_sender;
use lib_dispatch::prelude::ToBytes;
use lib_dispatch::prelude::*;
use crate::notification::DartNotificationSender;
use crate::{
c::{extend_front_four_bytes_into_bytes, forget_rust},
model::{FFIRequest, FFIResponse},
};
use flowy_core::get_client_server_configuration;
use flowy_core::*;
use flowy_notification::register_notification_sender;
use lazy_static::lazy_static;
use lib_dispatch::prelude::ToBytes;
use lib_dispatch::prelude::*;
use parking_lot::RwLock;
use std::{ffi::CStr, os::raw::c_char};
mod c;
mod model;
mod notification;
mod protobuf;
mod util;
lazy_static! {
static ref APPFLOWY_CORE: RwLock<Option<AppFlowyCore>> = RwLock::new(None);

View File

@ -1,27 +0,0 @@
[package]
name = "flowy-client-sync"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
lib-ot = { path = "../../../shared-lib/lib-ot" }
lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-derive = { path = "../flowy-derive" }
folder-model = { path = "../../../shared-lib/folder-model" }
database-model = { path = "../../../shared-lib/database-model" }
revision-model = { path = "../../../shared-lib/revision-model" }
document-model = { path = "../../../shared-lib/document-model" }
flowy-sync = { path = "../../../shared-lib/flowy-sync" }
bytes = "1.4"
tokio = { version = "1.26", features = ["full"] }
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = {version = "1.0"}
dissimilar = "1.0"
tracing = { version = "0.1", features = ["log"] }
url = "2.3"
strum = "0.21"
strum_macros = "0.21"
chrono = "0.4.23"
parking_lot = "0.12.1"

View File

@ -1,491 +0,0 @@
use crate::errors::{SyncError, SyncResult};
use crate::util::cal_diff;
use database_model::{
gen_block_id, gen_row_id, CellRevision, DatabaseBlockRevision, RowChangeset, RowRevision,
};
use flowy_sync::util::make_operations_from_revisions;
use lib_infra::util::md5;
use lib_ot::core::{DeltaBuilder, DeltaOperations, EmptyAttributes, OperationTransform};
use revision_model::Revision;
use std::any::type_name;
use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::Arc;
pub type DatabaseBlockOperations = DeltaOperations<EmptyAttributes>;
pub type DatabaseBlockOperationsBuilder = DeltaBuilder;
#[derive(Debug, Clone)]
pub struct DatabaseBlockRevisionPad {
block: DatabaseBlockRevision,
operations: DatabaseBlockOperations,
}
impl std::ops::Deref for DatabaseBlockRevisionPad {
type Target = DatabaseBlockRevision;
fn deref(&self) -> &Self::Target {
&self.block
}
}
impl DatabaseBlockRevisionPad {
pub fn duplicate_data(&self, duplicated_block_id: &str) -> DatabaseBlockRevision {
let duplicated_rows = self
.block
.rows
.iter()
.map(|row| {
let mut duplicated_row = row.as_ref().clone();
duplicated_row.id = gen_row_id();
duplicated_row.block_id = duplicated_block_id.to_string();
Arc::new(duplicated_row)
})
.collect::<Vec<Arc<RowRevision>>>();
DatabaseBlockRevision {
block_id: duplicated_block_id.to_string(),
rows: duplicated_rows,
}
}
pub fn from_operations(operations: DatabaseBlockOperations) -> SyncResult<Self> {
let s = operations.content()?;
let revision: DatabaseBlockRevision = serde_json::from_str(&s).map_err(|e| {
let msg = format!(
"Deserialize operations to {} failed: {}",
type_name::<DatabaseBlockRevision>(),
e
);
tracing::error!("{}", s);
SyncError::internal().context(msg)
})?;
Ok(Self {
block: revision,
operations,
})
}
pub fn from_revisions(revisions: Vec<Revision>) -> SyncResult<Self> {
let operations: DatabaseBlockOperations = make_operations_from_revisions(revisions)?;
Self::from_operations(operations)
}
#[tracing::instrument(level = "trace", skip(self, row), err)]
pub fn add_row_rev(
&mut self,
row: RowRevision,
start_row_id: Option<String>,
) -> SyncResult<Option<DatabaseBlockRevisionChangeset>> {
self.modify(|rows| {
if let Some(start_row_id) = start_row_id {
if !start_row_id.is_empty() {
if let Some(index) = rows.iter().position(|row| row.id == start_row_id) {
rows.insert(index + 1, Arc::new(row));
return Ok(Some(()));
}
}
}
rows.push(Arc::new(row));
Ok(Some(()))
})
}
pub fn delete_rows(
&mut self,
row_ids: Vec<Cow<'_, String>>,
) -> SyncResult<Option<DatabaseBlockRevisionChangeset>> {
self.modify(|rows| {
rows.retain(|row| !row_ids.contains(&Cow::Borrowed(&row.id)));
Ok(Some(()))
})
}
pub fn get_row_rev(&self, row_id: &str) -> Option<(usize, Arc<RowRevision>)> {
for (index, row) in self.block.rows.iter().enumerate() {
if row.id == row_id {
return Some((index, row.clone()));
}
}
None
}
pub fn get_row_revs<T>(
&self,
row_ids: Option<Vec<Cow<'_, T>>>,
) -> SyncResult<Vec<Arc<RowRevision>>>
where
T: AsRef<str> + ToOwned + ?Sized,
{
match row_ids {
None => Ok(self.block.rows.clone()),
Some(row_ids) => {
let row_map = self
.block
.rows
.iter()
.map(|row| (row.id.as_str(), row.clone()))
.collect::<HashMap<&str, Arc<RowRevision>>>();
Ok(
row_ids
.iter()
.flat_map(|row_id| {
let row_id = row_id.as_ref().as_ref();
match row_map.get(row_id) {
None => {
tracing::error!("Can't find the row with id: {}", row_id);
None
},
Some(row) => Some(row.clone()),
}
})
.collect::<Vec<_>>(),
)
},
}
}
pub fn get_cell_revs(
&self,
field_id: &str,
row_ids: Option<Vec<Cow<'_, String>>>,
) -> SyncResult<Vec<CellRevision>> {
let rows = self.get_row_revs(row_ids)?;
let cell_revs = rows
.iter()
.flat_map(|row| {
let cell_rev = row.cells.get(field_id)?;
Some(cell_rev.clone())
})
.collect::<Vec<CellRevision>>();
Ok(cell_revs)
}
pub fn number_of_rows(&self) -> i32 {
self.block.rows.len() as i32
}
pub fn index_of_row(&self, row_id: &str) -> Option<usize> {
self.block.rows.iter().position(|row| row.id == row_id)
}
pub fn update_row(
&mut self,
changeset: RowChangeset,
) -> SyncResult<Option<DatabaseBlockRevisionChangeset>> {
let row_id = changeset.row_id.clone();
self.modify_row(&row_id, |row| {
let mut is_changed = None;
if let Some(height) = changeset.height {
row.height = height;
is_changed = Some(());
}
if let Some(visibility) = changeset.visibility {
row.visibility = visibility;
is_changed = Some(());
}
if !changeset.cell_by_field_id.is_empty() {
is_changed = Some(());
changeset
.cell_by_field_id
.into_iter()
.for_each(|(field_id, cell)| {
row.cells.insert(field_id, cell);
})
}
Ok(is_changed)
})
}
pub fn move_row(
&mut self,
row_id: &str,
from: usize,
to: usize,
) -> SyncResult<Option<DatabaseBlockRevisionChangeset>> {
self.modify(|row_revs| {
if let Some(position) = row_revs.iter().position(|row_rev| row_rev.id == row_id) {
debug_assert_eq!(from, position);
let row_rev = row_revs.remove(position);
if to > row_revs.len() {
Err(SyncError::out_of_bound())
} else {
row_revs.insert(to, row_rev);
Ok(Some(()))
}
} else {
Ok(None)
}
})
}
pub fn modify<F>(&mut self, f: F) -> SyncResult<Option<DatabaseBlockRevisionChangeset>>
where
F: for<'a> FnOnce(&'a mut Vec<Arc<RowRevision>>) -> SyncResult<Option<()>>,
{
let cloned_self = self.clone();
match f(&mut self.block.rows)? {
None => Ok(None),
Some(_) => {
let old = cloned_self.revision_json()?;
let new = self.revision_json()?;
match cal_diff::<EmptyAttributes>(old, new) {
None => Ok(None),
Some(operations) => {
tracing::trace!(
"[{}] Composing operations {}",
type_name::<DatabaseBlockRevision>(),
operations.json_str()
);
self.operations = self.operations.compose(&operations)?;
Ok(Some(DatabaseBlockRevisionChangeset {
operations,
md5: md5(&self.operations.json_bytes()),
}))
},
}
},
}
}
fn modify_row<F>(
&mut self,
row_id: &str,
f: F,
) -> SyncResult<Option<DatabaseBlockRevisionChangeset>>
where
F: FnOnce(&mut RowRevision) -> SyncResult<Option<()>>,
{
self.modify(|rows| {
if let Some(row_rev) = rows.iter_mut().find(|row_rev| row_id == row_rev.id) {
f(Arc::make_mut(row_rev))
} else {
tracing::warn!("[BlockMetaPad]: Can't find any row with id: {}", row_id);
Ok(None)
}
})
}
pub fn revision_json(&self) -> SyncResult<String> {
serde_json::to_string(&self.block)
.map_err(|e| SyncError::internal().context(format!("serial block to json failed: {}", e)))
}
pub fn operations_json_str(&self) -> String {
self.operations.json_str()
}
}
pub struct DatabaseBlockRevisionChangeset {
pub operations: DatabaseBlockOperations,
/// md5: the md5 of the grid after applying the change.
pub md5: String,
}
pub fn make_database_block_operations(
block_rev: &DatabaseBlockRevision,
) -> DatabaseBlockOperations {
let json = serde_json::to_string(&block_rev).unwrap();
DatabaseBlockOperationsBuilder::new().insert(&json).build()
}
pub fn make_database_block_revisions(
_user_id: &str,
database_block_meta_data: &DatabaseBlockRevision,
) -> Vec<Revision> {
let operations = make_database_block_operations(database_block_meta_data);
let bytes = operations.json_bytes();
let revision = Revision::initial_revision(&database_block_meta_data.block_id, bytes);
vec![revision]
}
impl std::default::Default for DatabaseBlockRevisionPad {
fn default() -> Self {
let block_revision = DatabaseBlockRevision {
block_id: gen_block_id(),
rows: vec![],
};
let operations = make_database_block_operations(&block_revision);
DatabaseBlockRevisionPad {
block: block_revision,
operations,
}
}
}
#[cfg(test)]
mod tests {
use crate::client_database::{DatabaseBlockOperations, DatabaseBlockRevisionPad};
use database_model::{RowChangeset, RowRevision};
use std::borrow::Cow;
#[test]
fn block_meta_add_row() {
let mut pad = test_pad();
let row = RowRevision {
id: "1".to_string(),
block_id: pad.block_id.clone(),
cells: Default::default(),
height: 0,
visibility: false,
};
let change = pad.add_row_rev(row.clone(), None).unwrap().unwrap();
assert_eq!(pad.rows.first().unwrap().as_ref(), &row);
assert_eq!(
change.operations.json_str(),
r#"[{"retain":24},{"insert":"{\"id\":\"1\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
);
}
#[test]
fn block_meta_insert_row() {
let mut pad = test_pad();
let row_1 = test_row_rev("1", &pad);
let row_2 = test_row_rev("2", &pad);
let row_3 = test_row_rev("3", &pad);
let change = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
assert_eq!(
change.operations.json_str(),
r#"[{"retain":24},{"insert":"{\"id\":\"1\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
);
let change = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
assert_eq!(
change.operations.json_str(),
r#"[{"retain":90},{"insert":",{\"id\":\"2\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
);
let change = pad
.add_row_rev(row_3.clone(), Some("2".to_string()))
.unwrap()
.unwrap();
assert_eq!(
change.operations.json_str(),
r#"[{"retain":157},{"insert":",{\"id\":\"3\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
);
assert_eq!(*pad.rows[0], row_1);
assert_eq!(*pad.rows[1], row_2);
assert_eq!(*pad.rows[2], row_3);
}
fn test_row_rev(id: &str, pad: &DatabaseBlockRevisionPad) -> RowRevision {
RowRevision {
id: id.to_string(),
block_id: pad.block_id.clone(),
cells: Default::default(),
height: 0,
visibility: false,
}
}
#[test]
fn block_meta_insert_row2() {
let mut pad = test_pad();
let row_1 = test_row_rev("1", &pad);
let row_2 = test_row_rev("2", &pad);
let row_3 = test_row_rev("3", &pad);
let _ = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
let _ = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
let _ = pad
.add_row_rev(row_3.clone(), Some("1".to_string()))
.unwrap()
.unwrap();
assert_eq!(*pad.rows[0], row_1);
assert_eq!(*pad.rows[1], row_3);
assert_eq!(*pad.rows[2], row_2);
}
#[test]
fn block_meta_insert_row3() {
let mut pad = test_pad();
let row_1 = test_row_rev("1", &pad);
let row_2 = test_row_rev("2", &pad);
let row_3 = test_row_rev("3", &pad);
let _ = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
let _ = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
let _ = pad
.add_row_rev(row_3.clone(), Some("".to_string()))
.unwrap()
.unwrap();
assert_eq!(*pad.rows[0], row_1);
assert_eq!(*pad.rows[1], row_2);
assert_eq!(*pad.rows[2], row_3);
}
#[test]
fn block_meta_delete_row() {
let mut pad = test_pad();
let pre_json_str = pad.operations_json_str();
let row = RowRevision {
id: "1".to_string(),
block_id: pad.block_id.clone(),
cells: Default::default(),
height: 0,
visibility: false,
};
let _ = pad.add_row_rev(row.clone(), None).unwrap().unwrap();
let change = pad
.delete_rows(vec![Cow::Borrowed(&row.id)])
.unwrap()
.unwrap();
assert_eq!(
change.operations.json_str(),
r#"[{"retain":24},{"delete":66},{"retain":2}]"#
);
assert_eq!(pad.operations_json_str(), pre_json_str);
}
#[test]
fn block_meta_update_row() {
let mut pad = test_pad();
let row = RowRevision {
id: "1".to_string(),
block_id: pad.block_id.clone(),
cells: Default::default(),
height: 0,
visibility: false,
};
let changeset = RowChangeset {
row_id: row.id.clone(),
height: Some(100),
visibility: Some(true),
cell_by_field_id: Default::default(),
};
let _ = pad.add_row_rev(row, None).unwrap().unwrap();
let change = pad.update_row(changeset).unwrap().unwrap();
assert_eq!(
change.operations.json_str(),
r#"[{"retain":69},{"insert":"10"},{"retain":15},{"insert":"tru"},{"delete":4},{"retain":4}]"#
);
assert_eq!(
pad.revision_json().unwrap(),
r#"{"block_id":"1","rows":[{"id":"1","block_id":"1","cells":[],"height":100,"visibility":true}]}"#
);
}
fn test_pad() -> DatabaseBlockRevisionPad {
let operations =
DatabaseBlockOperations::from_json(r#"[{"insert":"{\"block_id\":\"1\",\"rows\":[]}"}]"#)
.unwrap();
DatabaseBlockRevisionPad::from_operations(operations).unwrap()
}
}

View File

@ -1,81 +0,0 @@
use crate::errors::{SyncError, SyncResult};
use database_model::{
BuildDatabaseContext, DatabaseBlockMetaRevision, DatabaseBlockRevision, FieldRevision,
LayoutSetting, RowRevision,
};
use std::sync::Arc;
pub struct DatabaseBuilder {
build_context: BuildDatabaseContext,
}
impl std::default::Default for DatabaseBuilder {
fn default() -> Self {
let mut build_context = BuildDatabaseContext::new();
let block_meta = DatabaseBlockMetaRevision::new();
let block_meta_data = DatabaseBlockRevision {
block_id: block_meta.block_id.clone(),
rows: vec![],
};
build_context.block_metas.push(block_meta);
build_context.blocks.push(block_meta_data);
DatabaseBuilder { build_context }
}
}
impl DatabaseBuilder {
pub fn new() -> Self {
Self::default()
}
pub fn add_field(&mut self, field: FieldRevision) {
self.build_context.field_revs.push(Arc::new(field));
}
pub fn add_row(&mut self, row_rev: RowRevision) {
let block_meta_rev = self.build_context.block_metas.first_mut().unwrap();
let block_rev = self.build_context.blocks.first_mut().unwrap();
block_rev.rows.push(Arc::new(row_rev));
block_meta_rev.row_count += 1;
}
pub fn add_empty_row(&mut self) {
let row = RowRevision::new(self.block_id());
self.add_row(row);
}
pub fn field_revs(&self) -> &Vec<Arc<FieldRevision>> {
&self.build_context.field_revs
}
pub fn block_id(&self) -> &str {
&self.build_context.block_metas.first().unwrap().block_id
}
pub fn set_layout_setting(&mut self, layout_setting: LayoutSetting) {
self.build_context.layout_setting = layout_setting;
}
pub fn build(self) -> BuildDatabaseContext {
self.build_context
}
}
#[allow(dead_code)]
fn check_rows(fields: &[FieldRevision], rows: &[RowRevision]) -> SyncResult<()> {
let field_ids = fields
.iter()
.map(|field| &field.id)
.collect::<Vec<&String>>();
for row in rows {
let cell_field_ids = row.cells.keys().into_iter().collect::<Vec<&String>>();
if cell_field_ids != field_ids {
let msg = format!("{:?} contains invalid cells", row);
return Err(SyncError::internal().context(msg));
}
}
Ok(())
}

View File

@ -1,488 +0,0 @@
use crate::errors::{internal_sync_error, SyncError, SyncResult};
use crate::util::cal_diff;
use database_model::{
gen_block_id, gen_database_id, DatabaseBlockMetaRevision, DatabaseBlockMetaRevisionChangeset,
DatabaseRevision, FieldRevision, FieldTypeRevision,
};
use flowy_sync::util::make_operations_from_revisions;
use lib_infra::util::md5;
use lib_infra::util::move_vec_element;
use lib_ot::core::{DeltaOperationBuilder, DeltaOperations, EmptyAttributes, OperationTransform};
use revision_model::Revision;
use std::collections::HashMap;
use std::sync::Arc;
pub type DatabaseOperations = DeltaOperations<EmptyAttributes>;
pub type DatabaseOperationsBuilder = DeltaOperationBuilder<EmptyAttributes>;
#[derive(Clone)]
pub struct DatabaseRevisionPad {
database_rev: Arc<DatabaseRevision>,
operations: DatabaseOperations,
}
pub trait JsonDeserializer {
fn deserialize(&self, type_option_data: Vec<u8>) -> SyncResult<String>;
}
impl DatabaseRevisionPad {
pub fn database_id(&self) -> String {
self.database_rev.database_id.clone()
}
pub async fn duplicate_database_block_meta(
&self,
) -> (Vec<FieldRevision>, Vec<DatabaseBlockMetaRevision>) {
let fields = self
.database_rev
.fields
.iter()
.map(|field_rev| field_rev.as_ref().clone())
.collect();
let blocks = self
.database_rev
.blocks
.iter()
.map(|block| {
let mut duplicated_block = (**block).clone();
duplicated_block.block_id = gen_block_id();
duplicated_block
})
.collect::<Vec<DatabaseBlockMetaRevision>>();
(fields, blocks)
}
pub fn from_operations(operations: DatabaseOperations) -> SyncResult<Self> {
let content = operations.content()?;
let database_rev: DatabaseRevision = serde_json::from_str(&content).map_err(|e| {
let msg = format!("Deserialize operations to database failed: {}", e);
SyncError::internal().context(msg)
})?;
Ok(Self {
database_rev: Arc::new(database_rev),
operations,
})
}
pub fn from_revisions(revisions: Vec<Revision>) -> SyncResult<Self> {
let operations: DatabaseOperations = make_operations_from_revisions(revisions)?;
Self::from_operations(operations)
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub fn create_field_rev(
&mut self,
new_field_rev: FieldRevision,
start_field_id: Option<String>,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_database(|grid_meta| {
// Check if the field exists or not
if grid_meta
.fields
.iter()
.any(|field_rev| field_rev.id == new_field_rev.id)
{
tracing::error!("Duplicate grid field");
return Ok(None);
}
let insert_index = match start_field_id {
None => None,
Some(start_field_id) => grid_meta
.fields
.iter()
.position(|field| field.id == start_field_id),
};
let new_field_rev = Arc::new(new_field_rev);
match insert_index {
None => grid_meta.fields.push(new_field_rev),
Some(index) => grid_meta.fields.insert(index, new_field_rev),
}
Ok(Some(()))
})
}
pub fn delete_field_rev(
&mut self,
field_id: &str,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_database(|database| {
match database
.fields
.iter()
.position(|field| field.id == field_id)
{
None => Ok(None),
Some(index) => {
if database.fields[index].is_primary {
Err(SyncError::can_not_delete_primary_field())
} else {
database.fields.remove(index);
Ok(Some(()))
}
},
}
})
}
pub fn duplicate_field_rev(
&mut self,
field_id: &str,
duplicated_field_id: &str,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_database(|grid_meta| {
match grid_meta
.fields
.iter()
.position(|field| field.id == field_id)
{
None => Ok(None),
Some(index) => {
let mut duplicate_field_rev = grid_meta.fields[index].as_ref().clone();
duplicate_field_rev.id = duplicated_field_id.to_string();
duplicate_field_rev.name = format!("{} (copy)", duplicate_field_rev.name);
grid_meta
.fields
.insert(index + 1, Arc::new(duplicate_field_rev));
Ok(Some(()))
},
}
})
}
/// Modifies the current field type of the [FieldTypeRevision]
///
/// # Arguments
///
/// * `field_id`: the id of the field
/// * `field_type`: the new field type of the field
/// * `make_default_type_option`: create the field type's type-option data
/// * `type_option_transform`: transform the old type-option data into data for the new field type
///
///
pub fn switch_to_field<DT, TT, T>(
&mut self,
field_id: &str,
new_field_type: T,
make_default_type_option: DT,
type_option_transform: TT,
) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
DT: FnOnce() -> String,
TT: FnOnce(FieldTypeRevision, Option<String>, String) -> String,
T: Into<FieldTypeRevision>,
{
let new_field_type = new_field_type.into();
self.modify_database(|database_rev| {
match database_rev
.fields
.iter_mut()
.find(|field_rev| field_rev.id == field_id)
{
None => {
tracing::warn!("Can not find the field with id: {}", field_id);
Ok(None)
},
Some(field_rev) => {
let mut_field_rev = Arc::make_mut(field_rev);
let old_field_type_rev = mut_field_rev.ty;
let old_field_type_option = mut_field_rev
.get_type_option_str(mut_field_rev.ty)
.map(|value| value.to_owned());
match mut_field_rev.get_type_option_str(new_field_type) {
Some(new_field_type_option) => {
let transformed_type_option = type_option_transform(
old_field_type_rev,
old_field_type_option,
new_field_type_option.to_owned(),
);
mut_field_rev.insert_type_option_str(&new_field_type, transformed_type_option);
},
None => {
// If the type-option data doesn't exist yet, create the default type-option data.
let new_field_type_option = make_default_type_option();
let transformed_type_option = type_option_transform(
old_field_type_rev,
old_field_type_option,
new_field_type_option,
);
mut_field_rev.insert_type_option_str(&new_field_type, transformed_type_option);
},
}
mut_field_rev.ty = new_field_type;
Ok(Some(()))
},
}
})
}
pub fn replace_field_rev(
&mut self,
field_rev: Arc<FieldRevision>,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_database(|grid_meta| {
match grid_meta
.fields
.iter()
.position(|field| field.id == field_rev.id)
{
None => Ok(None),
Some(index) => {
grid_meta.fields.remove(index);
grid_meta.fields.insert(index, field_rev);
Ok(Some(()))
},
}
})
}
pub fn move_field(
&mut self,
field_id: &str,
from_index: usize,
to_index: usize,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_database(|grid_meta| {
match move_vec_element(
&mut grid_meta.fields,
|field| field.id == field_id,
from_index,
to_index,
)
.map_err(internal_sync_error)?
{
true => Ok(Some(())),
false => Ok(None),
}
})
}
pub fn contain_field(&self, field_id: &str) -> bool {
self
.database_rev
.fields
.iter()
.any(|field| field.id == field_id)
}
pub fn get_field_rev(&self, field_id: &str) -> Option<(usize, &Arc<FieldRevision>)> {
self
.database_rev
.fields
.iter()
.enumerate()
.find(|(_, field)| field.id == field_id)
}
pub fn get_field_revs(
&self,
field_ids: Option<Vec<String>>,
) -> SyncResult<Vec<Arc<FieldRevision>>> {
match field_ids {
None => Ok(self.database_rev.fields.clone()),
Some(field_ids) => {
let field_by_field_id = self
.database_rev
.fields
.iter()
.map(|field| (&field.id, field))
.collect::<HashMap<&String, &Arc<FieldRevision>>>();
let fields = field_ids
.iter()
.flat_map(|field_id| match field_by_field_id.get(&field_id) {
None => {
tracing::error!("Can't find the field with id: {}", field_id);
None
},
Some(field) => Some((*field).clone()),
})
.collect::<Vec<Arc<FieldRevision>>>();
Ok(fields)
},
}
}
pub fn create_block_meta_rev(
&mut self,
block: DatabaseBlockMetaRevision,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_database(|grid_meta| {
if grid_meta.blocks.iter().any(|b| b.block_id == block.block_id) {
tracing::warn!("Duplicate grid block");
Ok(None)
} else {
match grid_meta.blocks.last() {
None => grid_meta.blocks.push(Arc::new(block)),
Some(last_block) => {
if last_block.start_row_index > block.start_row_index
&& last_block.len() > block.start_row_index
{
let msg = "GridBlock's start_row_index should be greater than the last_block's start_row_index and its len".to_string();
return Err(SyncError::internal().context(msg))
}
grid_meta.blocks.push(Arc::new(block));
}
}
Ok(Some(()))
}
})
}
pub fn get_block_meta_revs(&self) -> Vec<Arc<DatabaseBlockMetaRevision>> {
self.database_rev.blocks.clone()
}
pub fn update_block_rev(
&mut self,
changeset: DatabaseBlockMetaRevisionChangeset,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
let block_id = changeset.block_id.clone();
self.modify_block(&block_id, |block| {
let mut is_changed = None;
if let Some(row_count) = changeset.row_count {
block.row_count = row_count;
is_changed = Some(());
}
if let Some(start_row_index) = changeset.start_row_index {
block.start_row_index = start_row_index;
is_changed = Some(());
}
Ok(is_changed)
})
}
pub fn database_md5(&self) -> String {
md5(&self.operations.json_bytes())
}
pub fn operations_json_str(&self) -> String {
self.operations.json_str()
}
pub fn get_fields(&self) -> &[Arc<FieldRevision>] {
&self.database_rev.fields
}
fn modify_database<F>(&mut self, f: F) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
F: FnOnce(&mut DatabaseRevision) -> SyncResult<Option<()>>,
{
let cloned_database = self.database_rev.clone();
match f(Arc::make_mut(&mut self.database_rev))? {
None => Ok(None),
Some(_) => {
let old = make_database_rev_json_str(&cloned_database)?;
let new = self.json_str()?;
match cal_diff::<EmptyAttributes>(old, new) {
None => Ok(None),
Some(operations) => {
self.operations = self.operations.compose(&operations)?;
Ok(Some(DatabaseRevisionChangeset {
operations,
md5: self.database_md5(),
}))
},
}
},
}
}
fn modify_block<F>(
&mut self,
block_id: &str,
f: F,
) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
F: FnOnce(&mut DatabaseBlockMetaRevision) -> SyncResult<Option<()>>,
{
self.modify_database(|grid_rev| {
match grid_rev
.blocks
.iter()
.position(|block| block.block_id == block_id)
{
None => {
tracing::warn!("[GridMetaPad]: Can't find any block with id: {}", block_id);
Ok(None)
},
Some(index) => {
let block_rev = Arc::make_mut(&mut grid_rev.blocks[index]);
f(block_rev)
},
}
})
}
pub fn modify_field<F>(
&mut self,
field_id: &str,
f: F,
) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
F: FnOnce(&mut FieldRevision) -> SyncResult<Option<()>>,
{
self.modify_database(|grid_rev| {
match grid_rev
.fields
.iter()
.position(|field| field.id == field_id)
{
None => {
tracing::warn!("[GridMetaPad]: Can't find any field with id: {}", field_id);
Ok(None)
},
Some(index) => {
let mut_field_rev = Arc::make_mut(&mut grid_rev.fields[index]);
f(mut_field_rev)
},
}
})
}
pub fn json_str(&self) -> SyncResult<String> {
make_database_rev_json_str(&self.database_rev)
}
}
pub fn make_database_rev_json_str(grid_revision: &DatabaseRevision) -> SyncResult<String> {
let json = serde_json::to_string(grid_revision)
.map_err(|err| internal_sync_error(format!("Serialize grid to json str failed. {:?}", err)))?;
Ok(json)
}
pub struct DatabaseRevisionChangeset {
pub operations: DatabaseOperations,
/// md5: the md5 of the database operations after applying the change.
pub md5: String,
}
pub fn make_database_operations(grid_rev: &DatabaseRevision) -> DatabaseOperations {
let json = serde_json::to_string(&grid_rev).unwrap();
DatabaseOperationsBuilder::new().insert(&json).build()
}
pub fn make_database_revisions(_user_id: &str, grid_rev: &DatabaseRevision) -> Vec<Revision> {
let operations = make_database_operations(grid_rev);
let bytes = operations.json_bytes();
let revision = Revision::initial_revision(&grid_rev.database_id, bytes);
vec![revision]
}
impl std::default::Default for DatabaseRevisionPad {
fn default() -> Self {
let database = DatabaseRevision::new(&gen_database_id());
let operations = make_database_operations(&database);
DatabaseRevisionPad {
database_rev: Arc::new(database),
operations,
}
}
}
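// An illustrative, standalone sketch (std-only) of the copy-on-write step that
// `modify_database` and `modify_block` rely on: `Arc::make_mut` clones the
// shared value only while other references are alive, so the pre-edit snapshot
// used for diffing stays untouched while the pad mutates its own copy.
use std::sync::Arc;

#[derive(Clone, Debug, PartialEq)]
struct Rev {
  blocks: Vec<String>,
}

fn main() {
  let mut rev = Arc::new(Rev { blocks: vec!["block_1".to_string()] });
  let snapshot = rev.clone(); // cheap: only bumps the reference count
  Arc::make_mut(&mut rev).blocks.push("block_2".to_string()); // the deep clone happens here, once
  assert_eq!(snapshot.blocks.len(), 1); // the snapshot still sees the old state
  assert_eq!(rev.blocks.len(), 2); // the pad sees the new block
}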

View File

@ -1,381 +0,0 @@
use crate::errors::{internal_sync_error, SyncError, SyncResult};
use crate::util::cal_diff;
use database_model::{
DatabaseViewRevision, FieldRevision, FieldTypeRevision, FilterRevision,
GroupConfigurationRevision, LayoutRevision, SortRevision,
};
use flowy_sync::util::make_operations_from_revisions;
use lib_infra::util::md5;
use lib_ot::core::{DeltaBuilder, DeltaOperations, EmptyAttributes, OperationTransform};
use revision_model::Revision;
use std::sync::Arc;
pub type DatabaseViewOperations = DeltaOperations<EmptyAttributes>;
pub type DatabaseViewOperationsBuilder = DeltaBuilder;
#[derive(Debug, Clone)]
pub struct DatabaseViewRevisionPad {
view: Arc<DatabaseViewRevision>,
operations: DatabaseViewOperations,
}
impl std::ops::Deref for DatabaseViewRevisionPad {
type Target = DatabaseViewRevision;
fn deref(&self) -> &Self::Target {
&self.view
}
}
impl DatabaseViewRevisionPad {
// For the moment, the view_id is equal to the grid_id, while the database_id identifies the
// underlying database. A database can be referenced by multiple views.
pub fn new(database_id: String, view_id: String, name: String, layout: LayoutRevision) -> Self {
let view = Arc::new(DatabaseViewRevision::new(
database_id,
view_id,
true,
name,
layout,
));
let json = serde_json::to_string(&view).unwrap();
let operations = DatabaseViewOperationsBuilder::new().insert(&json).build();
Self { view, operations }
}
pub fn from_operations(operations: DatabaseViewOperations) -> SyncResult<Self> {
if operations.is_empty() {
return Err(SyncError::record_not_found().context("Unexpected empty operations"));
}
let s = operations.content()?;
let view: DatabaseViewRevision = serde_json::from_str(&s).map_err(|e| {
let msg = format!("Deserialize operations to GridViewRevision failed: {}", e);
tracing::error!("parsing json: {}", s);
SyncError::internal().context(msg)
})?;
Ok(Self {
view: Arc::new(view),
operations,
})
}
pub fn from_revisions(revisions: Vec<Revision>) -> SyncResult<Self> {
let operations: DatabaseViewOperations = make_operations_from_revisions(revisions)?;
Self::from_operations(operations)
}
pub fn get_groups_by_field_revs(
&self,
field_revs: &[Arc<FieldRevision>],
) -> Vec<Arc<GroupConfigurationRevision>> {
self.groups.get_objects_by_field_revs(field_revs)
}
pub fn get_all_groups(&self) -> Vec<Arc<GroupConfigurationRevision>> {
self.groups.get_all_objects()
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub fn insert_or_update_group_configuration(
&mut self,
field_id: &str,
field_type: &FieldTypeRevision,
group_configuration_rev: GroupConfigurationRevision,
) -> SyncResult<Option<DatabaseViewRevisionChangeset>> {
self.modify(|view| {
// Only save one group
view.groups.clear();
view
.groups
.add_object(field_id, field_type, group_configuration_rev);
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip_all)]
pub fn contains_group(&self, field_id: &str, field_type: &FieldTypeRevision) -> bool {
self.view.groups.get_objects(field_id, field_type).is_some()
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub fn with_mut_group<F: FnOnce(&mut GroupConfigurationRevision)>(
&mut self,
field_id: &str,
field_type: &FieldTypeRevision,
configuration_id: &str,
mut_configuration_fn: F,
) -> SyncResult<Option<DatabaseViewRevisionChangeset>> {
self.modify(
|view| match view.groups.get_mut_objects(field_id, field_type) {
None => Ok(None),
Some(configurations_revs) => {
for configuration_rev in configurations_revs {
if configuration_rev.id == configuration_id {
mut_configuration_fn(Arc::make_mut(configuration_rev));
return Ok(Some(()));
}
}
Ok(None)
},
},
)
}
pub fn delete_group(
&mut self,
group_id: &str,
field_id: &str,
field_type: &FieldTypeRevision,
) -> SyncResult<Option<DatabaseViewRevisionChangeset>> {
self.modify(|view| {
if let Some(groups) = view.groups.get_mut_objects(field_id, field_type) {
groups.retain(|group| group.id != group_id);
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn get_all_sorts(&self, _field_revs: &[Arc<FieldRevision>]) -> Vec<Arc<SortRevision>> {
self.sorts.get_all_objects()
}
/// For the moment, a field type only has one sort.
pub fn get_sorts(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
) -> Vec<Arc<SortRevision>> {
self
.sorts
.get_objects(field_id, field_type_rev)
.unwrap_or_default()
}
pub fn get_sort(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
sort_id: &str,
) -> Option<Arc<SortRevision>> {
self
.sorts
.get_object(field_id, field_type_rev, |sort| sort.id == sort_id)
}
pub fn insert_sort(
&mut self,
field_id: &str,
sort_rev: SortRevision,
) -> SyncResult<Option<DatabaseViewRevisionChangeset>> {
self.modify(|view| {
let field_type = sort_rev.field_type;
view.sorts.add_object(field_id, &field_type, sort_rev);
Ok(Some(()))
})
}
pub fn update_sort(
&mut self,
field_id: &str,
sort_rev: SortRevision,
) -> SyncResult<Option<DatabaseViewRevisionChangeset>> {
self.modify(|view| {
if let Some(sort) = view
.sorts
.get_mut_object(field_id, &sort_rev.field_type, |sort| {
sort.id == sort_rev.id
})
{
let sort = Arc::make_mut(sort);
sort.condition = sort_rev.condition;
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn delete_sort<T: Into<FieldTypeRevision>>(
&mut self,
sort_id: &str,
field_id: &str,
field_type: T,
) -> SyncResult<Option<DatabaseViewRevisionChangeset>> {
let field_type = field_type.into();
self.modify(|view| {
if let Some(sorts) = view.sorts.get_mut_objects(field_id, &field_type) {
sorts.retain(|sort| sort.id != sort_id);
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn delete_all_sorts(&mut self) -> SyncResult<Option<DatabaseViewRevisionChangeset>> {
self.modify(|view| {
view.sorts.clear();
Ok(Some(()))
})
}
pub fn get_all_filters(&self, field_revs: &[Arc<FieldRevision>]) -> Vec<Arc<FilterRevision>> {
self.filters.get_objects_by_field_revs(field_revs)
}
/// For the moment, a field type only has one filter.
pub fn get_filters(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
) -> Vec<Arc<FilterRevision>> {
self
.filters
.get_objects(field_id, field_type_rev)
.unwrap_or_default()
}
pub fn get_filter(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
filter_id: &str,
) -> Option<Arc<FilterRevision>> {
self
.filters
.get_object(field_id, field_type_rev, |filter| filter.id == filter_id)
}
pub fn insert_filter(
&mut self,
field_id: &str,
filter_rev: FilterRevision,
) -> SyncResult<Option<DatabaseViewRevisionChangeset>> {
self.modify(|view| {
let field_type = filter_rev.field_type;
view.filters.add_object(field_id, &field_type, filter_rev);
Ok(Some(()))
})
}
pub fn update_filter(
&mut self,
field_id: &str,
filter_rev: FilterRevision,
) -> SyncResult<Option<DatabaseViewRevisionChangeset>> {
self.modify(|view| {
if let Some(filter) =
view
.filters
.get_mut_object(field_id, &filter_rev.field_type, |filter| {
filter.id == filter_rev.id
})
{
let filter = Arc::make_mut(filter);
filter.condition = filter_rev.condition;
filter.content = filter_rev.content;
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn delete_filter<T: Into<FieldTypeRevision>>(
&mut self,
filter_id: &str,
field_id: &str,
field_type: T,
) -> SyncResult<Option<DatabaseViewRevisionChangeset>> {
let field_type = field_type.into();
self.modify(|view| {
if let Some(filters) = view.filters.get_mut_objects(field_id, &field_type) {
filters.retain(|filter| filter.id != filter_id);
Ok(Some(()))
} else {
Ok(None)
}
})
}
/// Returns the settings for the given layout, or `None` if no settings have been saved for it.
/// Each [database view](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/frontend/database-view) has its own settings.
pub fn get_layout_setting<T>(&self, layout: &LayoutRevision) -> Option<T>
where
T: serde::de::DeserializeOwned,
{
let settings_str = self.view.layout_settings.get(layout)?;
serde_json::from_str::<T>(settings_str).ok()
}
/// Updates the settings for the given layout type.
pub fn set_layout_setting<T>(
&mut self,
layout: &LayoutRevision,
settings: &T,
) -> SyncResult<Option<DatabaseViewRevisionChangeset>>
where
T: serde::Serialize,
{
let settings_str = serde_json::to_string(settings).map_err(internal_sync_error)?;
self.modify(|view| {
view.layout_settings.insert(layout.clone(), settings_str);
Ok(Some(()))
})
}
pub fn json_str(&self) -> SyncResult<String> {
make_database_view_rev_json_str(&self.view)
}
pub fn layout(&self) -> LayoutRevision {
self.layout.clone()
}
fn modify<F>(&mut self, f: F) -> SyncResult<Option<DatabaseViewRevisionChangeset>>
where
F: FnOnce(&mut DatabaseViewRevision) -> SyncResult<Option<()>>,
{
let cloned_view = self.view.clone();
match f(Arc::make_mut(&mut self.view))? {
None => Ok(None),
Some(_) => {
let old = make_database_view_rev_json_str(&cloned_view)?;
let new = self.json_str()?;
match cal_diff::<EmptyAttributes>(old, new) {
None => Ok(None),
Some(operations) => {
self.operations = self.operations.compose(&operations)?;
let md5 = md5(&self.operations.json_bytes());
Ok(Some(DatabaseViewRevisionChangeset { operations, md5 }))
},
}
},
}
}
}
#[derive(Debug)]
pub struct DatabaseViewRevisionChangeset {
pub operations: DatabaseViewOperations,
pub md5: String,
}
pub fn make_database_view_rev_json_str(
database_view_rev: &DatabaseViewRevision,
) -> SyncResult<String> {
let json = serde_json::to_string(database_view_rev).map_err(|err| {
internal_sync_error(format!("Serialize grid view to json str failed. {:?}", err))
})?;
Ok(json)
}
pub fn make_database_view_operations(
database_view_rev: &DatabaseViewRevision,
) -> DatabaseViewOperations {
let json = serde_json::to_string(database_view_rev).unwrap();
DatabaseViewOperationsBuilder::new().insert(&json).build()
}
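// An illustrative, standalone sketch of the layout-settings round-trip that
// `set_layout_setting`/`get_layout_setting` perform above: settings live as
// JSON strings keyed by layout. Assumes the `serde` and `serde_json` crates;
// the `CalendarSettings` type is hypothetical, not part of this crate.
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CalendarSettings {
  first_day_of_week: u8,
}

fn main() -> serde_json::Result<()> {
  let mut layout_settings: HashMap<String, String> = HashMap::new();
  let settings = CalendarSettings { first_day_of_week: 1 };
  // set_layout_setting: serialize, then store under the layout key.
  layout_settings.insert("calendar".to_string(), serde_json::to_string(&settings)?);
  // get_layout_setting: look up the key and deserialize; a miss yields None.
  let loaded: Option<CalendarSettings> = layout_settings
    .get("calendar")
    .and_then(|s| serde_json::from_str(s).ok());
  assert_eq!(loaded, Some(settings));
  Ok(())
}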

View File

@ -1,9 +0,0 @@
mod block_revision_pad;
mod database_builder;
mod database_revision_pad;
mod database_view_revision_pad;
pub use block_revision_pad::*;
pub use database_builder::*;
pub use database_revision_pad::*;
pub use database_view_revision_pad::*;

View File

@ -1,263 +0,0 @@
use crate::{
client_document::{
history::{History, UndoResult},
view::{ViewExtensions, RECORD_THRESHOLD},
},
errors::SyncError,
};
use bytes::Bytes;
use lib_infra::util::md5;
use lib_ot::text_delta::DeltaTextOperationBuilder;
use lib_ot::{core::*, text_delta::DeltaTextOperations};
use tokio::sync::mpsc;
pub trait InitialDocument {
fn json_str() -> String;
}
pub struct EmptyDocument();
impl InitialDocument for EmptyDocument {
fn json_str() -> String {
DeltaTextOperations::default().json_str()
}
}
pub struct NewlineDocument();
impl InitialDocument for NewlineDocument {
fn json_str() -> String {
initial_delta_document_content()
}
}
pub fn initial_delta_document_content() -> String {
DeltaTextOperationBuilder::new()
.insert("\n")
.build()
.json_str()
}
pub struct ClientDocument {
operations: DeltaTextOperations,
history: History,
view: ViewExtensions,
last_edit_time: usize,
notify: Option<mpsc::UnboundedSender<()>>,
}
impl ClientDocument {
pub fn new<C: InitialDocument>() -> Self {
let content = C::json_str();
Self::from_json(&content).unwrap()
}
pub fn from_operations(operations: DeltaTextOperations) -> Self {
ClientDocument {
operations,
history: History::new(),
view: ViewExtensions::new(),
last_edit_time: 0,
notify: None,
}
}
pub fn from_json(json: &str) -> Result<Self, SyncError> {
let operations = DeltaTextOperations::from_json(json)?;
Ok(Self::from_operations(operations))
}
pub fn get_operations_json(&self) -> String {
self.operations.json_str()
}
pub fn to_bytes(&self) -> Bytes {
self.operations.json_bytes()
}
pub fn to_content(&self) -> String {
self.operations.content().unwrap()
}
pub fn get_operations(&self) -> &DeltaTextOperations {
&self.operations
}
pub fn document_md5(&self) -> String {
let bytes = self.to_bytes();
md5(&bytes)
}
pub fn set_notify(&mut self, notify: mpsc::UnboundedSender<()>) {
self.notify = Some(notify);
}
pub fn set_operations(&mut self, operations: DeltaTextOperations) {
tracing::trace!("document: {}", operations.json_str());
self.operations = operations;
match &self.notify {
None => {},
Some(notify) => {
let _ = notify.send(());
},
}
}
pub fn compose_operations(&mut self, operations: DeltaTextOperations) -> Result<(), SyncError> {
tracing::trace!(
"{} compose {}",
&self.operations.json_str(),
operations.json_str()
);
let composed_operations = self.operations.compose(&operations)?;
let mut undo_operations = operations.invert(&self.operations);
let now = chrono::Utc::now().timestamp_millis() as usize;
if now - self.last_edit_time < RECORD_THRESHOLD {
if let Some(last_operation) = self.history.undo() {
tracing::trace!("compose previous change");
tracing::trace!("current = {}", undo_operations);
tracing::trace!("previous = {}", last_operation);
undo_operations = undo_operations.compose(&last_operation)?;
}
} else {
self.last_edit_time = now;
}
if !undo_operations.is_empty() {
tracing::trace!("add history operations: {}", undo_operations);
self.history.record(undo_operations);
}
self.set_operations(composed_operations);
Ok(())
}
pub fn insert<T: ToString>(
&mut self,
index: usize,
data: T,
) -> Result<DeltaTextOperations, SyncError> {
let text = data.to_string();
let interval = Interval::new(index, index);
validate_interval(&self.operations, &interval)?;
let operations = self.view.insert(&self.operations, &text, interval)?;
self.compose_operations(operations.clone())?;
Ok(operations)
}
pub fn delete(&mut self, interval: Interval) -> Result<DeltaTextOperations, SyncError> {
validate_interval(&self.operations, &interval)?;
debug_assert!(!interval.is_empty());
let operations = self.view.delete(&self.operations, interval)?;
if !operations.is_empty() {
self.compose_operations(operations.clone())?;
}
Ok(operations)
}
pub fn format(
&mut self,
interval: Interval,
attribute: AttributeEntry,
) -> Result<DeltaTextOperations, SyncError> {
validate_interval(&self.operations, &interval)?;
tracing::trace!("format {} with {:?}", interval, attribute);
let operations = self
.view
.format(&self.operations, attribute, interval)
.unwrap();
self.compose_operations(operations.clone())?;
Ok(operations)
}
pub fn replace<T: ToString>(
&mut self,
interval: Interval,
data: T,
) -> Result<DeltaTextOperations, SyncError> {
validate_interval(&self.operations, &interval)?;
let mut operations = DeltaTextOperations::default();
let text = data.to_string();
if !text.is_empty() {
operations = self.view.insert(&self.operations, &text, interval)?;
self.compose_operations(operations.clone())?;
}
if !interval.is_empty() {
let delete = self.delete(interval)?;
operations = operations.compose(&delete)?;
}
Ok(operations)
}
pub fn can_undo(&self) -> bool {
self.history.can_undo()
}
pub fn can_redo(&self) -> bool {
self.history.can_redo()
}
pub fn undo(&mut self) -> Result<UndoResult, SyncError> {
match self.history.undo() {
None => Err(SyncError::undo().context("Undo stack is empty")),
Some(undo_operations) => {
let (new_operations, inverted_operations) = self.invert(&undo_operations)?;
self.set_operations(new_operations);
self.history.add_redo(inverted_operations);
Ok(UndoResult {
operations: undo_operations,
})
},
}
}
pub fn redo(&mut self) -> Result<UndoResult, SyncError> {
match self.history.redo() {
None => Err(SyncError::redo()),
Some(redo_operations) => {
let (new_operations, inverted_operations) = self.invert(&redo_operations)?;
self.set_operations(new_operations);
self.history.add_undo(inverted_operations);
Ok(UndoResult {
operations: redo_operations,
})
},
}
}
pub fn is_empty(&self) -> bool {
// The document is empty if its text is equal to the initial text.
self.operations.json_str() == NewlineDocument::json_str()
}
}
impl ClientDocument {
fn invert(
&self,
operations: &DeltaTextOperations,
) -> Result<(DeltaTextOperations, DeltaTextOperations), SyncError> {
// c = a.compose(b)
// d = b.invert(a)
// a = c.compose(d)
let new_operations = self.operations.compose(operations)?;
let inverted_operations = operations.invert(&self.operations);
Ok((new_operations, inverted_operations))
}
}
fn validate_interval(
operations: &DeltaTextOperations,
interval: &Interval,
) -> Result<(), SyncError> {
if operations.utf16_target_len < interval.end {
tracing::error!(
"{:?} out of bounds. should 0..{}",
interval,
operations.utf16_target_len
);
return Err(SyncError::out_of_bound());
}
Ok(())
}
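// An illustrative, standalone sketch of the notification hook used by
// `set_notify`/`set_operations` above: every accepted edit pings an optional
// unbounded channel so an observer can react. Assumes the `tokio` crate with
// its runtime and macro features enabled.
use tokio::sync::mpsc;

struct Doc {
  notify: Option<mpsc::UnboundedSender<()>>,
}

impl Doc {
  fn set_operations(&mut self) {
    // ...swap in the composed operations, then wake any listener.
    if let Some(notify) = &self.notify {
      let _ = notify.send(()); // a closed receiver is deliberately ignored
    }
  }
}

#[tokio::main]
async fn main() {
  let (tx, mut rx) = mpsc::unbounded_channel();
  let mut doc = Doc { notify: Some(tx) };
  doc.set_operations();
  assert!(rx.recv().await.is_some()); // the edit was observed
}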

View File

@ -1,21 +0,0 @@
use crate::client_document::DeleteExt;
use lib_ot::{
core::{DeltaOperationBuilder, Interval},
text_delta::DeltaTextOperations,
};
pub struct DefaultDelete {}
impl DeleteExt for DefaultDelete {
fn ext_name(&self) -> &str {
"DefaultDelete"
}
fn apply(&self, _delta: &DeltaTextOperations, interval: Interval) -> Option<DeltaTextOperations> {
Some(
DeltaOperationBuilder::new()
.retain(interval.start)
.delete(interval.size())
.build(),
)
}
}
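// Worked example: for Interval { start: 3, end: 7 } the fallback above builds
// retain(3) + delete(4): keep everything before the interval, delete the
// interval itself, and leave the implicit tail untouched.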

View File

@ -1,5 +0,0 @@
mod default_delete;
mod preserve_line_format_merge;
pub use default_delete::*;
pub use preserve_line_format_merge::*;

View File

@ -1,65 +0,0 @@
use crate::{client_document::DeleteExt, util::is_newline};
use lib_ot::{
core::{
DeltaOperationBuilder, Interval, OperationAttributes, OperationIterator, Utf16CodeUnitMetric,
NEW_LINE,
},
text_delta::{empty_attributes, DeltaTextOperations},
};
pub struct PreserveLineFormatOnMerge {}
impl DeleteExt for PreserveLineFormatOnMerge {
fn ext_name(&self) -> &str {
"PreserveLineFormatOnMerge"
}
fn apply(&self, delta: &DeltaTextOperations, interval: Interval) -> Option<DeltaTextOperations> {
if interval.is_empty() {
return None;
}
// Seek to the interval's start position, e.g. the caret position when backspace is pressed.
let mut iter = OperationIterator::from_offset(delta, interval.start);
// The next op should be the "\n".
let newline_op = iter.next_op_with_len(1)?;
if !is_newline(newline_op.get_data()) {
return None;
}
iter.seek::<Utf16CodeUnitMetric>(interval.size() - 1);
let mut new_delta = DeltaOperationBuilder::new()
.retain(interval.start)
.delete(interval.size())
.build();
while iter.has_next() {
match iter.next() {
None => tracing::error!("op must be not None when has_next() return true"),
Some(op) => {
//
match op.get_data().find(NEW_LINE) {
None => {
new_delta.retain(op.len(), empty_attributes());
continue;
},
Some(line_break) => {
let mut attributes = op.get_attributes();
attributes.remove_all_value();
if newline_op.has_attribute() {
attributes.extend(newline_op.get_attributes());
}
new_delta.retain(line_break, empty_attributes());
new_delta.retain(1, attributes);
break;
},
}
},
}
}
Some(new_delta)
}
}

View File

@ -1,48 +0,0 @@
// use crate::{
// client::extensions::FormatExt,
// core::{Attribute, AttributeKey, Delta, DeltaBuilder, DeltaIter,
// Interval}, };
//
// pub struct FormatLinkAtCaretPositionExt {}
//
// impl FormatExt for FormatLinkAtCaretPositionExt {
// fn ext_name(&self) -> &str {
// std::any::type_name::<FormatLinkAtCaretPositionExt>() }
//
// fn apply(&self, delta: &Delta, interval: Interval, attribute: &Attribute)
// -> Option<Delta> { if attribute.key != AttributeKey::Link ||
// interval.size() != 0 { return None;
// }
//
// let mut iter = DeltaIter::from_offset(delta, interval.start);
// let (before, after) = (iter.next_op_with_len(interval.size()),
// iter.next_op()); let mut start = interval.end;
// let mut retain = 0;
//
// if let Some(before) = before {
// if before.contain_attribute(attribute) {
// start -= before.len();
// retain += before.len();
// }
// }
//
// if let Some(after) = after {
// if after.contain_attribute(attribute) {
// if retain != 0 {
// retain += after.len();
// }
// }
// }
//
// if retain == 0 {
// return None;
// }
//
// Some(
// DeltaBuilder::new()
// .retain(start)
// .retain_with_attributes(retain, (attribute.clone()).into())
// .build(),
// )
// }
// }

View File

@ -1,7 +0,0 @@
pub use format_at_position::*;
pub use resolve_block_format::*;
pub use resolve_inline_format::*;
mod format_at_position;
mod resolve_block_format;
mod resolve_inline_format;

View File

@ -1,63 +0,0 @@
use lib_ot::core::AttributeEntry;
use lib_ot::text_delta::is_block;
use lib_ot::{
core::{DeltaOperationBuilder, Interval, OperationIterator},
text_delta::{empty_attributes, AttributeScope, DeltaTextOperations},
};
use crate::{
client_document::{extensions::helper::line_break, FormatExt},
util::find_newline,
};
pub struct ResolveBlockFormat {}
impl FormatExt for ResolveBlockFormat {
fn ext_name(&self) -> &str {
"ResolveBlockFormat"
}
fn apply(
&self,
delta: &DeltaTextOperations,
interval: Interval,
attribute: &AttributeEntry,
) -> Option<DeltaTextOperations> {
if !is_block(&attribute.key) {
return None;
}
let mut new_delta = DeltaOperationBuilder::new().retain(interval.start).build();
let mut iter = OperationIterator::from_offset(delta, interval.start);
let mut start = 0;
let end = interval.size();
while start < end && iter.has_next() {
let next_op = iter.next_op_with_len(end - start).unwrap();
match find_newline(next_op.get_data()) {
None => new_delta.retain(next_op.len(), empty_attributes()),
Some(_) => {
let tmp_delta = line_break(&next_op, attribute, AttributeScope::Block);
new_delta.extend(tmp_delta);
},
}
start += next_op.len();
}
while iter.has_next() {
let op = iter
.next_op()
.expect("Unexpected None, iter.has_next() must return op");
match find_newline(op.get_data()) {
None => new_delta.retain(op.len(), empty_attributes()),
Some(line_break) => {
new_delta.retain(line_break, empty_attributes());
new_delta.retain(1, attribute.clone().into());
break;
},
}
}
Some(new_delta)
}
}

View File

@ -1,48 +0,0 @@
use lib_ot::core::AttributeEntry;
use lib_ot::text_delta::is_inline;
use lib_ot::{
core::{DeltaOperationBuilder, Interval, OperationIterator},
text_delta::{AttributeScope, DeltaTextOperations},
};
use crate::{
client_document::{extensions::helper::line_break, FormatExt},
util::find_newline,
};
pub struct ResolveInlineFormat {}
impl FormatExt for ResolveInlineFormat {
fn ext_name(&self) -> &str {
"ResolveInlineFormat"
}
fn apply(
&self,
delta: &DeltaTextOperations,
interval: Interval,
attribute: &AttributeEntry,
) -> Option<DeltaTextOperations> {
if !is_inline(&attribute.key) {
return None;
}
let mut new_delta = DeltaOperationBuilder::new().retain(interval.start).build();
let mut iter = OperationIterator::from_offset(delta, interval.start);
let mut start = 0;
let end = interval.size();
while start < end && iter.has_next() {
let next_op = iter.next_op_with_len(end - start).unwrap();
match find_newline(next_op.get_data()) {
None => new_delta.retain(next_op.len(), attribute.clone().into()),
Some(_) => {
let tmp_delta = line_break(&next_op, attribute, AttributeScope::Inline);
new_delta.extend(tmp_delta);
},
}
start += next_op.len();
}
Some(new_delta)
}
}

View File

@ -1,44 +0,0 @@
use crate::util::find_newline;
use lib_ot::core::AttributeEntry;
use lib_ot::text_delta::{
empty_attributes, AttributeScope, DeltaTextOperation, DeltaTextOperations,
};
pub(crate) fn line_break(
op: &DeltaTextOperation,
attribute: &AttributeEntry,
scope: AttributeScope,
) -> DeltaTextOperations {
let mut new_delta = DeltaTextOperations::new();
let mut start = 0;
let end = op.len();
let mut s = op.get_data();
while let Some(line_break) = find_newline(s) {
match scope {
AttributeScope::Inline => {
new_delta.retain(line_break - start, attribute.clone().into());
new_delta.retain(1, empty_attributes());
},
AttributeScope::Block => {
new_delta.retain(line_break - start, empty_attributes());
new_delta.retain(1, attribute.clone().into());
},
_ => {
tracing::error!("Unsupported parser line break for {:?}", scope);
},
}
start = line_break + 1;
s = &s[start..s.len()];
}
if start < end {
match scope {
AttributeScope::Inline => new_delta.retain(end - start, attribute.clone().into()),
AttributeScope::Block => new_delta.retain(end - start, empty_attributes()),
_ => tracing::error!("Unsupported parser line break for {:?}", scope),
}
}
new_delta
}
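// Worked example: with scope = AttributeScope::Block and op data "ab\ncd",
// the loop emits retain(2) with empty attributes and retain(1) carrying the
// block attribute for the "\n". The tail then emits retain(2) with empty
// attributes, so block formats land only on newline characters, while the
// Inline scope does the opposite and skips them.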

View File

@ -1,60 +0,0 @@
use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::core::{is_empty_line_at_index, DeltaOperationBuilder, OperationIterator};
use lib_ot::text_delta::{attributes_except_header, BuildInTextAttributeKey, DeltaTextOperations};
pub struct AutoExitBlock {}
impl InsertExt for AutoExitBlock {
fn ext_name(&self) -> &str {
"AutoExitBlock"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
// Auto-exiting a block is triggered by entering two consecutive newlines.
if !is_newline(text) {
return None;
}
if !is_empty_line_at_index(delta, index) {
return None;
}
let mut iter = OperationIterator::from_offset(delta, index);
let next = iter.next_op()?;
let mut attributes = next.get_attributes();
let block_attributes = attributes_except_header(&next);
if block_attributes.is_empty() {
return None;
}
if next.len() > 1 {
return None;
}
match iter.next_op_with_newline() {
None => {},
Some((newline_op, _)) => {
let newline_attributes = attributes_except_header(&newline_op);
if block_attributes == newline_attributes {
return None;
}
},
}
attributes.retain_values(&[BuildInTextAttributeKey::Header.as_ref()]);
Some(
DeltaOperationBuilder::new()
.retain(index + replace_len)
.retain_with_attributes(1, attributes)
.build(),
)
}
}

View File

@ -1,94 +0,0 @@
use crate::{client_document::InsertExt, util::is_whitespace};
use lib_ot::core::AttributeHashMap;
use lib_ot::{
core::{count_utf16_code_units, DeltaOperationBuilder, OperationIterator},
text_delta::{empty_attributes, BuildInTextAttribute, DeltaTextOperations},
};
use std::cmp::min;
use url::Url;
pub struct AutoFormatExt {}
impl InsertExt for AutoFormatExt {
fn ext_name(&self) -> &str {
"AutoFormatExt"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
// Entering whitespace triggers auto-format.
if !is_whitespace(text) {
return None;
}
let mut iter = OperationIterator::new(delta);
if let Some(prev) = iter.next_op_with_len(index) {
match AutoFormat::parse(prev.get_data()) {
None => {},
Some(formatter) => {
let mut new_attributes = prev.get_attributes();
// format_len must not be greater than index. The url crate appends "/" to the
// end of the input string, which can make format_len exceed the input's length.
let format_len = min(index, formatter.format_len());
let format_attributes = formatter.to_attributes();
format_attributes.iter().for_each(|(k, v)| {
if !new_attributes.contains_key(k) {
new_attributes.insert(k.clone(), v.clone());
}
});
let next_attributes = match iter.next_op() {
None => empty_attributes(),
Some(op) => op.get_attributes(),
};
return Some(
DeltaOperationBuilder::new()
.retain(index + replace_len - min(index, format_len))
.retain_with_attributes(format_len, format_attributes)
.insert_with_attributes(text, next_attributes)
.build(),
);
},
}
}
None
}
}
pub enum AutoFormatter {
Url(Url),
}
impl AutoFormatter {
pub fn to_attributes(&self) -> AttributeHashMap {
match self {
AutoFormatter::Url(url) => BuildInTextAttribute::Link(url.as_str()).into(),
}
}
pub fn format_len(&self) -> usize {
let s = match self {
AutoFormatter::Url(url) => url.to_string(),
};
count_utf16_code_units(&s)
}
}
pub struct AutoFormat {}
impl AutoFormat {
fn parse(s: &str) -> Option<AutoFormatter> {
if let Ok(url) = Url::parse(s) {
return Some(AutoFormatter::Url(url));
}
None
}
}
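// An illustrative, standalone sketch of the detection step above. Assumes the
// `url` crate: `Url::parse` normalizes an input without a path by appending
// "/", which is exactly why `format_len` is clamped with `min(index, ...)`.
use url::Url;

fn main() {
  let url = Url::parse("https://appflowy.io").unwrap();
  // The parsed form is one code unit longer than what the user typed.
  assert_eq!(url.to_string(), "https://appflowy.io/");
}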

View File

@ -1,50 +0,0 @@
use crate::client_document::InsertExt;
use lib_ot::core::AttributeHashMap;
use lib_ot::{
core::{DeltaOperationBuilder, OperationAttributes, OperationIterator, NEW_LINE},
text_delta::{BuildInTextAttributeKey, DeltaTextOperations},
};
pub struct DefaultInsertAttribute {}
impl InsertExt for DefaultInsertAttribute {
fn ext_name(&self) -> &str {
"DefaultInsertAttribute"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
let iter = OperationIterator::new(delta);
let mut attributes = AttributeHashMap::new();
// Ensure each line split by "\n" keeps the block attributes. For example:
// insert "\n" to "123456" at index 3
//
// [{"insert":"123"},{"insert":"\n","attributes":{"header":1}},
// {"insert":"456"},{"insert":"\n","attributes":{"header":1}}]
if text.ends_with(NEW_LINE) {
match iter.last() {
None => {},
Some(op) => {
if op
.get_attributes()
.contains_key(BuildInTextAttributeKey::Header.as_ref())
{
attributes.extend(op.get_attributes());
}
},
}
}
Some(
DeltaOperationBuilder::new()
.retain(index + replace_len)
.insert_with_attributes(text, attributes)
.build(),
)
}
}

View File

@ -1,49 +0,0 @@
use crate::client_document::InsertExt;
pub use auto_exit_block::*;
pub use auto_format::*;
pub use default_insert::*;
use lib_ot::text_delta::DeltaTextOperations;
pub use preserve_block_format::*;
pub use preserve_inline_format::*;
pub use reset_format_on_new_line::*;
mod auto_exit_block;
mod auto_format;
mod default_insert;
mod preserve_block_format;
mod preserve_inline_format;
mod reset_format_on_new_line;
pub struct InsertEmbedsExt {}
impl InsertExt for InsertEmbedsExt {
fn ext_name(&self) -> &str {
"InsertEmbedsExt"
}
fn apply(
&self,
_delta: &DeltaTextOperations,
_replace_len: usize,
_text: &str,
_index: usize,
) -> Option<DeltaTextOperations> {
None
}
}
pub struct ForceNewlineForInsertsAroundEmbedExt {}
impl InsertExt for ForceNewlineForInsertsAroundEmbedExt {
fn ext_name(&self) -> &str {
"ForceNewlineForInsertsAroundEmbedExt"
}
fn apply(
&self,
_delta: &DeltaTextOperations,
_replace_len: usize,
_text: &str,
_index: usize,
) -> Option<DeltaTextOperations> {
None
}
}

View File

@ -1,72 +0,0 @@
use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::core::AttributeHashMap;
use lib_ot::{
core::{DeltaOperationBuilder, OperationIterator, NEW_LINE},
text_delta::{
attributes_except_header, empty_attributes, BuildInTextAttributeKey, DeltaTextOperations,
},
};
pub struct PreserveBlockFormatOnInsert {}
impl InsertExt for PreserveBlockFormatOnInsert {
fn ext_name(&self) -> &str {
"PreserveBlockFormatOnInsert"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
if !is_newline(text) {
return None;
}
let mut iter = OperationIterator::from_offset(delta, index);
match iter.next_op_with_newline() {
None => {},
Some((newline_op, offset)) => {
let newline_attributes = newline_op.get_attributes();
let block_attributes = attributes_except_header(&newline_op);
if block_attributes.is_empty() {
return None;
}
let mut reset_attribute = AttributeHashMap::new();
if newline_attributes.contains_key(BuildInTextAttributeKey::Header.as_ref()) {
reset_attribute.insert(BuildInTextAttributeKey::Header, 1);
}
let lines: Vec<_> = text.split(NEW_LINE).collect();
let mut new_delta = DeltaOperationBuilder::new()
.retain(index + replace_len)
.build();
lines.iter().enumerate().for_each(|(i, line)| {
if !line.is_empty() {
new_delta.insert(line, empty_attributes());
}
if i == 0 {
new_delta.insert(NEW_LINE, newline_attributes.clone());
} else if i < lines.len() - 1 {
new_delta.insert(NEW_LINE, block_attributes.clone());
} else {
// do nothing
}
});
if !reset_attribute.is_empty() {
new_delta.retain(offset, empty_attributes());
let len = newline_op.get_data().find(NEW_LINE).unwrap();
new_delta.retain(len, empty_attributes());
new_delta.retain(1, reset_attribute);
}
return Some(new_delta);
},
}
None
}
}

View File

@ -1,109 +0,0 @@
use crate::{
client_document::InsertExt,
util::{contain_newline, is_newline},
};
use lib_ot::{
core::{DeltaOperationBuilder, OpNewline, OperationIterator, NEW_LINE},
text_delta::{empty_attributes, BuildInTextAttributeKey, DeltaTextOperations},
};
pub struct PreserveInlineFormat {}
impl InsertExt for PreserveInlineFormat {
fn ext_name(&self) -> &str {
"PreserveInlineFormat"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
if contain_newline(text) {
return None;
}
let mut iter = OperationIterator::new(delta);
let prev = iter.next_op_with_len(index)?;
if OpNewline::parse(&prev).is_contain() {
return None;
}
let mut attributes = prev.get_attributes();
if attributes.is_empty() || !attributes.contains_key(BuildInTextAttributeKey::Link.as_ref()) {
return Some(
DeltaOperationBuilder::new()
.retain(index + replace_len)
.insert_with_attributes(text, attributes)
.build(),
);
}
let next = iter.next_op();
match &next {
None => attributes = empty_attributes(),
Some(next) => {
if OpNewline::parse(next).is_equal() {
attributes = empty_attributes();
}
},
}
let new_delta = DeltaOperationBuilder::new()
.retain(index + replace_len)
.insert_with_attributes(text, attributes)
.build();
Some(new_delta)
}
}
pub struct PreserveLineFormatOnSplit {}
impl InsertExt for PreserveLineFormatOnSplit {
fn ext_name(&self) -> &str {
"PreserveLineFormatOnSplit"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
if !is_newline(text) {
return None;
}
let mut iter = OperationIterator::new(delta);
let prev = iter.next_op_with_len(index)?;
if OpNewline::parse(&prev).is_end() {
return None;
}
let next = iter.next_op()?;
let newline_status = OpNewline::parse(&next);
if newline_status.is_end() {
return None;
}
let mut new_delta = DeltaTextOperations::new();
new_delta.retain(index + replace_len, empty_attributes());
if newline_status.is_contain() {
debug_assert!(!next.has_attribute());
new_delta.insert(NEW_LINE, empty_attributes());
return Some(new_delta);
}
match iter.next_op_with_newline() {
None => {},
Some((newline_op, _)) => {
new_delta.insert(NEW_LINE, newline_op.get_attributes());
},
}
Some(new_delta)
}
}

View File

@ -1,50 +0,0 @@
use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::core::AttributeHashMap;
use lib_ot::{
core::{DeltaOperationBuilder, OperationIterator, Utf16CodeUnitMetric, NEW_LINE},
text_delta::{BuildInTextAttributeKey, DeltaTextOperations},
};
pub struct ResetLineFormatOnNewLine {}
impl InsertExt for ResetLineFormatOnNewLine {
fn ext_name(&self) -> &str {
"ResetLineFormatOnNewLine"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
if !is_newline(text) {
return None;
}
let mut iter = OperationIterator::new(delta);
iter.seek::<Utf16CodeUnitMetric>(index);
let next_op = iter.next_op()?;
if !next_op.get_data().starts_with(NEW_LINE) {
return None;
}
let mut reset_attribute = AttributeHashMap::new();
if next_op
.get_attributes()
.contains_key(BuildInTextAttributeKey::Header.as_ref())
{
reset_attribute.remove_value(BuildInTextAttributeKey::Header);
}
let len = index + replace_len;
Some(
DeltaOperationBuilder::new()
.retain(len)
.insert_with_attributes(NEW_LINE, next_op.get_attributes())
.retain_with_attributes(1, reset_attribute)
.trim()
.build(),
)
}
}

View File

@ -1,40 +0,0 @@
pub use delete::*;
pub use format::*;
pub use insert::*;
use lib_ot::core::AttributeEntry;
use lib_ot::{core::Interval, text_delta::DeltaTextOperations};
mod delete;
mod format;
mod helper;
mod insert;
pub type InsertExtension = Box<dyn InsertExt + Send + Sync>;
pub type FormatExtension = Box<dyn FormatExt + Send + Sync>;
pub type DeleteExtension = Box<dyn DeleteExt + Send + Sync>;
pub trait InsertExt {
fn ext_name(&self) -> &str;
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations>;
}
pub trait FormatExt {
fn ext_name(&self) -> &str;
fn apply(
&self,
delta: &DeltaTextOperations,
interval: Interval,
attribute: &AttributeEntry,
) -> Option<DeltaTextOperations>;
}
pub trait DeleteExt {
fn ext_name(&self) -> &str;
fn apply(&self, delta: &DeltaTextOperations, interval: Interval) -> Option<DeltaTextOperations>;
}

View File

@ -1,80 +0,0 @@
use lib_ot::text_delta::DeltaTextOperations;
const MAX_UNDOES: usize = 20;
#[derive(Debug, Clone)]
pub struct UndoResult {
pub operations: DeltaTextOperations,
}
#[derive(Debug, Clone)]
pub struct History {
#[allow(dead_code)]
cur_undo: usize,
undoes: Vec<DeltaTextOperations>,
redoes: Vec<DeltaTextOperations>,
capacity: usize,
}
impl std::default::Default for History {
fn default() -> Self {
History {
cur_undo: 1,
undoes: Vec::new(),
redoes: Vec::new(),
capacity: MAX_UNDOES,
}
}
}
impl History {
pub fn new() -> Self {
History::default()
}
pub fn can_undo(&self) -> bool {
!self.undoes.is_empty()
}
pub fn can_redo(&self) -> bool {
!self.redoes.is_empty()
}
pub fn add_undo(&mut self, delta: DeltaTextOperations) {
self.undoes.push(delta);
}
pub fn add_redo(&mut self, delta: DeltaTextOperations) {
self.redoes.push(delta);
}
pub fn record(&mut self, delta: DeltaTextOperations) {
if delta.ops.is_empty() {
return;
}
self.redoes.clear();
self.add_undo(delta);
if self.undoes.len() > self.capacity {
self.undoes.remove(0);
}
}
pub fn undo(&mut self) -> Option<DeltaTextOperations> {
if !self.can_undo() {
return None;
}
let delta = self.undoes.pop().unwrap();
Some(delta)
}
pub fn redo(&mut self) -> Option<DeltaTextOperations> {
if !self.can_redo() {
return None;
}
let delta = self.redoes.pop().unwrap();
Some(delta)
}
}
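// An illustrative, standalone sketch of the stack discipline above, with
// `String` standing in for `DeltaTextOperations`: recording a new edit clears
// the redo stack, and the oldest undo entry is evicted once the capacity
// (MAX_UNDOES) is exceeded.
fn main() {
  let capacity = 3;
  let mut undoes: Vec<String> = Vec::new();
  let mut redoes: Vec<String> = vec!["stale redo".to_string()];
  for i in 0..4 {
    redoes.clear(); // History::record drops pending redoes
    undoes.push(format!("edit {}", i));
    if undoes.len() > capacity {
      undoes.remove(0); // evict the oldest entry
    }
  }
  assert!(redoes.is_empty());
  assert_eq!(undoes, ["edit 1", "edit 2", "edit 3"]);
}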

View File

@ -1,10 +0,0 @@
#![allow(clippy::module_inception)]
pub use document_pad::*;
pub(crate) use extensions::*;
pub use view::*;
mod document_pad;
mod extensions;
pub mod history;
mod view;

View File

@ -1,119 +0,0 @@
use crate::client_document::*;
use lib_ot::core::AttributeEntry;
use lib_ot::{
core::{trim, Interval},
errors::{ErrorBuilder, OTError, OTErrorCode},
text_delta::DeltaTextOperations,
};
pub const RECORD_THRESHOLD: usize = 400; // in milliseconds
pub struct ViewExtensions {
insert_exts: Vec<InsertExtension>,
format_exts: Vec<FormatExtension>,
delete_exts: Vec<DeleteExtension>,
}
impl ViewExtensions {
pub(crate) fn new() -> Self {
Self {
insert_exts: construct_insert_exts(),
format_exts: construct_format_exts(),
delete_exts: construct_delete_exts(),
}
}
pub(crate) fn insert(
&self,
operations: &DeltaTextOperations,
text: &str,
interval: Interval,
) -> Result<DeltaTextOperations, OTError> {
let mut new_operations = None;
for ext in &self.insert_exts {
if let Some(mut operations) = ext.apply(operations, interval.size(), text, interval.start) {
trim(&mut operations);
tracing::trace!("[{}] applied, delta: {}", ext.ext_name(), operations);
new_operations = Some(operations);
break;
}
}
match new_operations {
None => Err(ErrorBuilder::new(OTErrorCode::ApplyInsertFail).build()),
Some(new_operations) => Ok(new_operations),
}
}
pub(crate) fn delete(
&self,
delta: &DeltaTextOperations,
interval: Interval,
) -> Result<DeltaTextOperations, OTError> {
let mut new_delta = None;
for ext in &self.delete_exts {
if let Some(mut delta) = ext.apply(delta, interval) {
trim(&mut delta);
tracing::trace!("[{}] applied, delta: {}", ext.ext_name(), delta);
new_delta = Some(delta);
break;
}
}
match new_delta {
None => Err(ErrorBuilder::new(OTErrorCode::ApplyDeleteFail).build()),
Some(new_delta) => Ok(new_delta),
}
}
pub(crate) fn format(
&self,
operations: &DeltaTextOperations,
attribute: AttributeEntry,
interval: Interval,
) -> Result<DeltaTextOperations, OTError> {
let mut new_operations = None;
for ext in &self.format_exts {
if let Some(mut operations) = ext.apply(operations, interval, &attribute) {
trim(&mut operations);
tracing::trace!("[{}] applied, delta: {}", ext.ext_name(), operations);
new_operations = Some(operations);
break;
}
}
match new_operations {
None => Err(ErrorBuilder::new(OTErrorCode::ApplyFormatFail).build()),
Some(new_operations) => Ok(new_operations),
}
}
}
fn construct_insert_exts() -> Vec<InsertExtension> {
vec![
Box::new(InsertEmbedsExt {}),
Box::new(ForceNewlineForInsertsAroundEmbedExt {}),
Box::new(AutoExitBlock {}),
Box::new(PreserveBlockFormatOnInsert {}),
Box::new(PreserveLineFormatOnSplit {}),
Box::new(ResetLineFormatOnNewLine {}),
Box::new(AutoFormatExt {}),
Box::new(PreserveInlineFormat {}),
Box::new(DefaultInsertAttribute {}),
]
}
fn construct_format_exts() -> Vec<FormatExtension> {
vec![
// Box::new(FormatLinkAtCaretPositionExt {}),
Box::new(ResolveBlockFormat {}),
Box::new(ResolveInlineFormat {}),
]
}
fn construct_delete_exts() -> Vec<DeleteExtension> {
vec![
Box::new(PreserveLineFormatOnMerge {}),
Box::new(DefaultDelete {}),
]
}
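// An illustrative, standalone sketch of the first-match-wins chain that
// `insert`, `delete`, and `format` all follow above: extensions are consulted
// in order, the first one returning Some wins, and a catch-all sits last.
trait Ext {
  fn apply(&self, input: &str) -> Option<String>;
}

struct UppercaseFirstWord;
impl Ext for UppercaseFirstWord {
  fn apply(&self, input: &str) -> Option<String> {
    // Decline unless the input starts with a lowercase letter.
    input.chars().next().filter(|c| c.is_lowercase())?;
    Some(input.to_uppercase())
  }
}

struct Fallback;
impl Ext for Fallback {
  fn apply(&self, input: &str) -> Option<String> {
    Some(input.to_string())
  }
}

fn main() {
  let exts: Vec<Box<dyn Ext>> = vec![Box::new(UppercaseFirstWord), Box::new(Fallback)];
  let out = exts.iter().find_map(|ext| ext.apply("hello")).unwrap();
  assert_eq!(out, "HELLO");
}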

View File

@ -1,49 +0,0 @@
use crate::client_folder::FolderOperations;
use crate::{
client_folder::{default_folder_operations, FolderPad},
errors::SyncResult,
};
use flowy_sync::util::make_operations_from_revisions;
use folder_model::{TrashRevision, WorkspaceRevision};
use revision_model::Revision;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
pub(crate) struct FolderPadBuilder {
workspaces: Vec<WorkspaceRevision>,
trash: Vec<TrashRevision>,
}
impl FolderPadBuilder {
pub(crate) fn new() -> Self {
Self {
workspaces: vec![],
trash: vec![],
}
}
#[allow(dead_code)]
pub(crate) fn with_workspace(mut self, workspaces: Vec<WorkspaceRevision>) -> Self {
self.workspaces = workspaces;
self
}
#[allow(dead_code)]
pub(crate) fn with_trash(mut self, trash: Vec<TrashRevision>) -> Self {
self.trash = trash;
self
}
pub(crate) fn build_with_revisions(self, revisions: Vec<Revision>) -> SyncResult<FolderPad> {
let mut operations: FolderOperations = make_operations_from_revisions(revisions)?;
if operations.is_empty() {
operations = default_folder_operations();
}
FolderPad::from_operations(operations)
}
#[allow(dead_code)]
pub(crate) fn build(self) -> SyncResult<FolderPad> {
FolderPad::new(self.workspaces, self.trash)
}
}

View File

@ -1,147 +0,0 @@
use crate::client_folder::trash_node::TrashNode;
use crate::client_folder::workspace_node::WorkspaceNode;
use crate::errors::{SyncError, SyncResult};
use flowy_derive::Node;
use lib_ot::core::NodeTree;
use lib_ot::core::*;
use parking_lot::RwLock;
use std::sync::Arc;
pub type AtomicNodeTree = RwLock<NodeTree>;
pub struct FolderNodePad {
pub tree: Arc<AtomicNodeTree>,
pub node_id: NodeId,
pub workspaces: WorkspaceList,
pub trash: TrashList,
}
#[derive(Clone, Node)]
#[node_type = "workspaces"]
pub struct WorkspaceList {
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
#[node(child_name = "workspace")]
inner: Vec<WorkspaceNode>,
}
impl std::ops::Deref for WorkspaceList {
type Target = Vec<WorkspaceNode>;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl std::ops::DerefMut for WorkspaceList {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
#[derive(Clone, Node)]
#[node_type = "trash"]
pub struct TrashList {
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
#[node(child_name = "trash")]
inner: Vec<TrashNode>,
}
impl FolderNodePad {
pub fn new() -> Self {
Self::default()
}
pub fn get_workspace(&self, workspace_id: &str) -> Option<&WorkspaceNode> {
self
.workspaces
.iter()
.find(|workspace| workspace.id == workspace_id)
}
pub fn get_mut_workspace(&mut self, workspace_id: &str) -> Option<&mut WorkspaceNode> {
self
.workspaces
.iter_mut()
.find(|workspace| workspace.id == workspace_id)
}
pub fn add_workspace(&mut self, mut workspace: WorkspaceNode) {
let path = workspaces_path().clone_with(self.workspaces.len());
let op = NodeOperation::Insert {
path: path.clone(),
nodes: vec![workspace.to_node_data()],
};
self.tree.write().apply_op(op).unwrap();
let node_id = self.tree.read().node_id_at_path(path).unwrap();
workspace.node_id = Some(node_id);
self.workspaces.push(workspace);
}
pub fn to_json(&self, pretty: bool) -> SyncResult<String> {
self
.tree
.read()
.to_json(pretty)
.map_err(|e| SyncError::serde().context(e))
}
}
impl std::default::Default for FolderNodePad {
fn default() -> Self {
let tree = Arc::new(RwLock::new(NodeTree::default()));
// Workspace
let mut workspaces = WorkspaceList {
tree: tree.clone(),
node_id: None,
inner: vec![],
};
let workspace_node = workspaces.to_node_data();
// Trash
let mut trash = TrashList {
tree: tree.clone(),
node_id: None,
inner: vec![],
};
let trash_node = trash.to_node_data();
let folder_node = NodeDataBuilder::new("folder")
.add_node_data(workspace_node)
.add_node_data(trash_node)
.build();
let operation = NodeOperation::Insert {
path: folder_path(),
nodes: vec![folder_node],
};
tree.write().apply_op(operation).unwrap();
let node_id = tree.read().node_id_at_path(folder_path()).unwrap();
workspaces.node_id = Some(tree.read().node_id_at_path(workspaces_path()).unwrap());
trash.node_id = Some(tree.read().node_id_at_path(trash_path()).unwrap());
Self {
tree,
node_id,
workspaces,
trash,
}
}
}
fn folder_path() -> Path {
vec![0].into()
}
fn workspaces_path() -> Path {
folder_path().clone_with(0)
}
fn trash_path() -> Path {
folder_path().clone_with(1)
}
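// Path layout of the tree built by `default()` above (illustrative):
//   [0]    -> the "folder" node
//   [0, 0] -> the "workspaces" list
//   [0, 1] -> the "trash" list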

View File

@ -1,985 +0,0 @@
use crate::errors::internal_sync_error;
use crate::util::cal_diff;
use crate::{
client_folder::builder::FolderPadBuilder,
errors::{SyncError, SyncResult},
};
use folder_model::{AppRevision, FolderRevision, TrashRevision, ViewRevision, WorkspaceRevision};
use lib_infra::util::md5;
use lib_infra::util::move_vec_element;
use lib_ot::core::*;
use revision_model::Revision;
use serde::Deserialize;
use std::sync::Arc;
pub type FolderOperations = DeltaOperations<EmptyAttributes>;
pub type FolderOperationsBuilder = DeltaOperationBuilder<EmptyAttributes>;
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct FolderPad {
folder_rev: FolderRevision,
operations: FolderOperations,
}
impl FolderPad {
pub fn new(workspaces: Vec<WorkspaceRevision>, trash: Vec<TrashRevision>) -> SyncResult<Self> {
let folder_rev = FolderRevision {
workspaces: workspaces.into_iter().map(Arc::new).collect(),
trash: trash.into_iter().map(Arc::new).collect(),
};
Self::from_folder_rev(folder_rev)
}
pub fn from_folder_rev(folder_rev: FolderRevision) -> SyncResult<Self> {
let json = serde_json::to_string(&folder_rev).map_err(|e| {
SyncError::internal().context(format!("Serialize to folder json str failed: {}", e))
})?;
let operations = FolderOperationsBuilder::new().insert(&json).build();
Ok(Self {
folder_rev,
operations,
})
}
pub fn from_revisions(revisions: Vec<Revision>) -> SyncResult<Self> {
FolderPadBuilder::new().build_with_revisions(revisions)
}
pub fn from_operations(operations: FolderOperations) -> SyncResult<Self> {
let content = operations.content()?;
let mut deserializer = serde_json::Deserializer::from_reader(content.as_bytes());
let folder_rev = FolderRevision::deserialize(&mut deserializer).map_err(|e| {
tracing::error!("Deserialize folder from {} failed", content);
SyncError::internal().context(format!("Deserialize operations to folder failed: {}", e))
})?;
Ok(Self {
folder_rev,
operations,
})
}
pub fn get_operations(&self) -> &FolderOperations {
&self.operations
}
pub fn reset_folder(&mut self, operations: FolderOperations) -> SyncResult<String> {
let folder = FolderPad::from_operations(operations)?;
self.folder_rev = folder.folder_rev;
self.operations = folder.operations;
Ok(self.folder_md5())
}
pub fn compose_remote_operations(&mut self, operations: FolderOperations) -> SyncResult<String> {
let composed_operations = self.operations.compose(&operations)?;
self.reset_folder(composed_operations)
}
pub fn is_empty(&self) -> bool {
self.folder_rev.workspaces.is_empty() && self.folder_rev.trash.is_empty()
}
#[tracing::instrument(level = "trace", skip(self, workspace_rev), fields(workspace_name=%workspace_rev.name), err)]
pub fn create_workspace(
&mut self,
workspace_rev: WorkspaceRevision,
) -> SyncResult<Option<FolderChangeset>> {
let workspace = Arc::new(workspace_rev);
if self.folder_rev.workspaces.contains(&workspace) {
tracing::warn!("[RootFolder]: Duplicate workspace");
return Ok(None);
}
self.modify_workspaces(move |workspaces| {
workspaces.push(workspace);
Ok(Some(()))
})
}
pub fn update_workspace(
&mut self,
workspace_id: &str,
name: Option<String>,
desc: Option<String>,
) -> SyncResult<Option<FolderChangeset>> {
self.with_workspace(workspace_id, |workspace| {
if let Some(name) = name {
workspace.name = name;
}
if let Some(desc) = desc {
workspace.desc = desc;
}
Ok(Some(()))
})
}
pub fn read_workspaces(
&self,
workspace_id: Option<String>,
) -> SyncResult<Vec<WorkspaceRevision>> {
match workspace_id {
None => {
let workspaces = self
.folder_rev
.workspaces
.iter()
.map(|workspace| workspace.as_ref().clone())
.collect::<Vec<WorkspaceRevision>>();
Ok(workspaces)
},
Some(workspace_id) => {
if let Some(workspace) = self
.folder_rev
.workspaces
.iter()
.find(|workspace| workspace.id == workspace_id)
{
Ok(vec![workspace.as_ref().clone()])
} else {
Err(
SyncError::record_not_found()
.context(format!("Can't find workspace with id {}", workspace_id)),
)
}
},
}
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn delete_workspace(&mut self, workspace_id: &str) -> SyncResult<Option<FolderChangeset>> {
self.modify_workspaces(|workspaces| {
workspaces.retain(|w| w.id != workspace_id);
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip(self), fields(app_name=%app_rev.name), err)]
pub fn create_app(&mut self, app_rev: AppRevision) -> SyncResult<Option<FolderChangeset>> {
let workspace_id = app_rev.workspace_id.clone();
self.with_workspace(&workspace_id, move |workspace| {
if workspace.apps.contains(&app_rev) {
tracing::warn!("[RootFolder]: Duplicate app");
return Ok(None);
}
workspace.apps.push(app_rev);
Ok(Some(()))
})
}
pub fn read_app(&self, app_id: &str) -> SyncResult<AppRevision> {
for workspace in &self.folder_rev.workspaces {
if let Some(app) = workspace.apps.iter().find(|app| app.id == app_id) {
return Ok(app.clone());
}
}
Err(SyncError::record_not_found().context(format!("Can't find app with id {}", app_id)))
}
pub fn update_app(
&mut self,
app_id: &str,
name: Option<String>,
desc: Option<String>,
) -> SyncResult<Option<FolderChangeset>> {
self.with_app(app_id, move |app| {
if let Some(name) = name {
app.name = name;
}
if let Some(desc) = desc {
app.desc = desc;
}
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn delete_app(&mut self, app_id: &str) -> SyncResult<Option<FolderChangeset>> {
let app = self.read_app(app_id)?;
self.with_workspace(&app.workspace_id, |workspace| {
workspace.apps.retain(|app| app.id != app_id);
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn move_app(
&mut self,
app_id: &str,
from: usize,
to: usize,
) -> SyncResult<Option<FolderChangeset>> {
let app = self.read_app(app_id)?;
self.with_workspace(&app.workspace_id, |workspace| {
match move_vec_element(&mut workspace.apps, |app| app.id == app_id, from, to)
.map_err(internal_sync_error)?
{
true => Ok(Some(())),
false => Ok(None),
}
})
}
#[tracing::instrument(level = "trace", skip(self), fields(view_name=%view_rev.name), err)]
pub fn create_view(&mut self, view_rev: ViewRevision) -> SyncResult<Option<FolderChangeset>> {
let app_id = view_rev.app_id.clone();
self.with_app(&app_id, move |app| {
if app.belongings.contains(&view_rev) {
tracing::warn!("[RootFolder]: Duplicate view");
return Ok(None);
}
app.belongings.push(view_rev);
Ok(Some(()))
})
}
pub fn read_view(&self, view_id: &str) -> SyncResult<ViewRevision> {
for workspace in &self.folder_rev.workspaces {
for app in &(*workspace.apps) {
if let Some(view) = app.belongings.iter().find(|b| b.id == view_id) {
return Ok(view.clone());
}
}
}
Err(SyncError::record_not_found().context(format!("Can't find view with id {}", view_id)))
}
pub fn read_views(&self, belong_to_id: &str) -> SyncResult<Vec<ViewRevision>> {
for workspace in &self.folder_rev.workspaces {
for app in &(*workspace.apps) {
if app.id == belong_to_id {
return Ok(app.belongings.to_vec());
}
}
}
Ok(vec![])
}
pub fn update_view(
&mut self,
view_id: &str,
name: Option<String>,
desc: Option<String>,
modified_time: i64,
) -> SyncResult<Option<FolderChangeset>> {
let view = self.read_view(view_id)?;
self.with_view(&view.app_id, view_id, |view| {
if let Some(name) = name {
view.name = name;
}
if let Some(desc) = desc {
view.desc = desc;
}
view.modified_time = modified_time;
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn delete_view(
&mut self,
app_id: &str,
view_id: &str,
) -> SyncResult<Option<FolderChangeset>> {
self.with_app(app_id, |app| {
app.belongings.retain(|view| view.id != view_id);
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn move_view(
&mut self,
view_id: &str,
from: usize,
to: usize,
) -> SyncResult<Option<FolderChangeset>> {
let view = self.read_view(view_id)?;
self.with_app(&view.app_id, |app| {
match move_vec_element(&mut app.belongings, |view| view.id == view_id, from, to)
.map_err(internal_sync_error)?
{
true => Ok(Some(())),
false => Ok(None),
}
})
}
pub fn create_trash(&mut self, trash: Vec<TrashRevision>) -> SyncResult<Option<FolderChangeset>> {
self.with_trash(|original_trash| {
let mut new_trash = trash
.into_iter()
.flat_map(|new_trash| {
if original_trash
.iter()
.any(|old_trash| old_trash.id == new_trash.id)
{
None
} else {
Some(Arc::new(new_trash))
}
})
.collect::<Vec<Arc<TrashRevision>>>();
if new_trash.is_empty() {
Ok(None)
} else {
original_trash.append(&mut new_trash);
Ok(Some(()))
}
})
}
pub fn read_trash(&self, trash_id: Option<String>) -> SyncResult<Vec<TrashRevision>> {
match trash_id {
None => {
// Remove duplicate items, if any exist.
let mut trash_items = Vec::<TrashRevision>::with_capacity(self.folder_rev.trash.len());
for trash_item in self.folder_rev.trash.iter() {
if !trash_items.iter().any(|item| item.id == trash_item.id) {
trash_items.push(trash_item.as_ref().clone());
}
}
Ok(trash_items)
},
Some(trash_id) => match self.folder_rev.trash.iter().find(|t| t.id == trash_id) {
Some(trash) => Ok(vec![trash.as_ref().clone()]),
None => Ok(vec![]),
},
}
}
pub fn delete_trash(
&mut self,
trash_ids: Option<Vec<String>>,
) -> SyncResult<Option<FolderChangeset>> {
match trash_ids {
None => self.with_trash(|trash| {
trash.clear();
Ok(Some(()))
}),
Some(trash_ids) => self.with_trash(|trash| {
trash.retain(|t| !trash_ids.contains(&t.id));
Ok(Some(()))
}),
}
}
pub fn folder_md5(&self) -> String {
md5(&self.operations.json_bytes())
}
pub fn to_json(&self) -> SyncResult<String> {
make_folder_rev_json_str(&self.folder_rev)
}
}
pub fn make_folder_rev_json_str(folder_rev: &FolderRevision) -> SyncResult<String> {
let json = serde_json::to_string(folder_rev).map_err(|err| {
internal_sync_error(format!("Serialize folder to json str failed. {:?}", err))
})?;
Ok(json)
}
impl FolderPad {
fn modify_workspaces<F>(&mut self, f: F) -> SyncResult<Option<FolderChangeset>>
where
F: FnOnce(&mut Vec<Arc<WorkspaceRevision>>) -> SyncResult<Option<()>>,
{
let cloned_self = self.clone();
match f(&mut self.folder_rev.workspaces)? {
None => Ok(None),
Some(_) => {
let old = cloned_self.to_json()?;
let new = self.to_json()?;
match cal_diff::<EmptyAttributes>(old, new) {
None => Ok(None),
Some(operations) => {
self.operations = self.operations.compose(&operations)?;
Ok(Some(FolderChangeset {
operations,
md5: self.folder_md5(),
}))
},
}
},
}
}
fn with_workspace<F>(&mut self, workspace_id: &str, f: F) -> SyncResult<Option<FolderChangeset>>
where
F: FnOnce(&mut WorkspaceRevision) -> SyncResult<Option<()>>,
{
self.modify_workspaces(|workspaces| {
if let Some(workspace) = workspaces
.iter_mut()
.find(|workspace| workspace_id == workspace.id)
{
f(Arc::make_mut(workspace))
} else {
tracing::warn!(
"[FolderPad]: Can't find any workspace with id: {}",
workspace_id
);
Ok(None)
}
})
}
fn with_trash<F>(&mut self, f: F) -> SyncResult<Option<FolderChangeset>>
where
F: FnOnce(&mut Vec<Arc<TrashRevision>>) -> SyncResult<Option<()>>,
{
let cloned_self = self.clone();
match f(&mut self.folder_rev.trash)? {
None => Ok(None),
Some(_) => {
let old = cloned_self.to_json()?;
let new = self.to_json()?;
match cal_diff::<EmptyAttributes>(old, new) {
None => Ok(None),
Some(operations) => {
self.operations = self.operations.compose(&operations)?;
Ok(Some(FolderChangeset {
operations,
md5: self.folder_md5(),
}))
},
}
},
}
}
fn with_app<F>(&mut self, app_id: &str, f: F) -> SyncResult<Option<FolderChangeset>>
where
F: FnOnce(&mut AppRevision) -> SyncResult<Option<()>>,
{
let workspace_id = match self
.folder_rev
.workspaces
.iter()
.find(|workspace| workspace.apps.iter().any(|app| app.id == app_id))
{
None => {
tracing::warn!("[FolderPad]: Can't find any app with id: {}", app_id);
return Ok(None);
},
Some(workspace) => workspace.id.clone(),
};
self.with_workspace(&workspace_id, |workspace| {
// It's safe to unwrap: this workspace was selected because it contains an app with this id.
f(workspace
.apps
.iter_mut()
.find(|app| app_id == app.id)
.unwrap())
})
}
fn with_view<F>(
&mut self,
belong_to_id: &str,
view_id: &str,
f: F,
) -> SyncResult<Option<FolderChangeset>>
where
F: FnOnce(&mut ViewRevision) -> SyncResult<Option<()>>,
{
self.with_app(belong_to_id, |app| {
match app.belongings.iter_mut().find(|view| view_id == view.id) {
None => {
tracing::warn!("[FolderPad]: Can't find any view with id: {}", view_id);
Ok(None)
},
Some(view) => f(view),
}
})
}
}
pub fn default_folder_operations() -> FolderOperations {
FolderOperationsBuilder::new()
.insert(r#"{"workspaces":[],"trash":[]}"#)
.build()
}
pub fn initial_folder_operations(folder_pad: &FolderPad) -> SyncResult<FolderOperations> {
let json = folder_pad.to_json()?;
let operations = FolderOperationsBuilder::new().insert(&json).build();
Ok(operations)
}
pub struct FolderChangeset {
pub operations: FolderOperations,
/// md5: the md5 of the FolderPad's operations after applying the change.
pub md5: String,
}
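// How a FolderChangeset is produced: every mutating FolderPad method funnels
// through modify_workspaces / with_trash above, which serialize the folder
// revision to JSON before and after the closure runs, diff the two snapshots
// with cal_diff, and compose the resulting operations onto the pad. The md5
// therefore always describes the post-change operation log.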
#[cfg(test)]
mod tests {
#![allow(clippy::all)]
use crate::client_folder::folder_pad::FolderPad;
use crate::client_folder::{FolderOperations, FolderOperationsBuilder};
use chrono::Utc;
use folder_model::{AppRevision, FolderRevision, TrashRevision, ViewRevision, WorkspaceRevision};
use lib_ot::core::OperationTransform;
use serde::Deserialize;
#[test]
fn folder_add_workspace() {
let (mut folder, initial_operations, _) = test_folder();
let _time = Utc::now();
let mut workspace_1 = WorkspaceRevision::default();
workspace_1.name = "My first workspace".to_owned();
let operations_1 = folder
.create_workspace(workspace_1)
.unwrap()
.unwrap()
.operations;
let mut workspace_2 = WorkspaceRevision::default();
workspace_2.name = "My second workspace".to_owned();
let operations_2 = folder
.create_workspace(workspace_2)
.unwrap()
.unwrap()
.operations;
let folder_from_operations =
make_folder_from_operations(initial_operations, vec![operations_1, operations_2]);
assert_eq!(folder, folder_from_operations);
}
#[test]
fn folder_deserialize_invalid_json_test() {
for json in vec![
// No timestamp
r#"{"workspaces":[{"id":"1","name":"first workspace","desc":"","apps":[]}],"trash":[]}"#,
// Trailing characters
r#"{"workspaces":[{"id":"1","name":"first workspace","desc":"","apps":[]}],"trash":[]}123"#,
] {
let mut deserializer = serde_json::Deserializer::from_reader(json.as_bytes());
let folder_rev = FolderRevision::deserialize(&mut deserializer).unwrap();
assert_eq!(
folder_rev.workspaces.first().as_ref().unwrap().name,
"first workspace"
);
}
}
#[test]
fn folder_update_workspace() {
let (mut folder, initial_operation, workspace) = test_folder();
assert_folder_equal(
&folder,
&make_folder_from_operations(initial_operation.clone(), vec![]),
r#"{"workspaces":[{"id":"1","name":"😁 my first workspace","desc":"","apps":[],"modified_time":0,"create_time":0}],"trash":[]}"#,
);
let operations = folder
.update_workspace(&workspace.id, Some("☺️ rename workspace".to_string()), None)
.unwrap()
.unwrap()
.operations;
let folder_from_operations = make_folder_from_operations(initial_operation, vec![operations]);
assert_folder_equal(
&folder,
&folder_from_operations,
r#"{"workspaces":[{"id":"1","name":"☺️ rename workspace","desc":"","apps":[],"modified_time":0,"create_time":0}],"trash":[]}"#,
);
}
#[test]
fn folder_add_app() {
let (folder, initial_operations, _app) = test_app_folder();
let folder_from_operations = make_folder_from_operations(initial_operations, vec![]);
assert_eq!(folder, folder_from_operations);
assert_folder_equal(
&folder,
&folder_from_operations,
r#"{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [
{
"id": "",
"workspace_id": "1",
"name": "😁 my first app",
"desc": "",
"belongings": [],
"version": 0,
"modified_time": 0,
"create_time": 0
}
],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_update_app() {
let (mut folder, initial_operations, app) = test_app_folder();
let operations = folder
.update_app(&app.id, Some("🤪 rename app".to_owned()), None)
.unwrap()
.unwrap()
.operations;
let new_folder = make_folder_from_operations(initial_operations, vec![operations]);
assert_folder_equal(
&folder,
&new_folder,
r#"{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [
{
"id": "",
"workspace_id": "1",
"name": "🤪 rename app",
"desc": "",
"belongings": [],
"version": 0,
"modified_time": 0,
"create_time": 0
}
],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_delete_app() {
let (mut folder, initial_operations, app) = test_app_folder();
let operations = folder.delete_app(&app.id).unwrap().unwrap().operations;
let new_folder = make_folder_from_operations(initial_operations, vec![operations]);
assert_folder_equal(
&folder,
&new_folder,
r#"{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_add_view() {
let (folder, initial_operations, _view) = test_view_folder();
assert_folder_equal(
&folder,
&make_folder_from_operations(initial_operations, vec![]),
r#"
{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [
{
"id": "",
"workspace_id": "1",
"name": "😁 my first app",
"desc": "",
"belongings": [
{
"id": "",
"belong_to_id": "",
"name": "🎃 my first view",
"desc": "",
"view_type": "Blank",
"version": 0,
"belongings": [],
"modified_time": 0,
"create_time": 0
}
],
"version": 0,
"modified_time": 0,
"create_time": 0
}
],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_update_view() {
let (mut folder, initial_operations, view) = test_view_folder();
let operations = folder
.update_view(&view.id, Some("😦 rename view".to_owned()), None, 123)
.unwrap()
.unwrap()
.operations;
let new_folder = make_folder_from_operations(initial_operations, vec![operations]);
assert_folder_equal(
&folder,
&new_folder,
r#"{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [
{
"id": "",
"workspace_id": "1",
"name": "😁 my first app",
"desc": "",
"belongings": [
{
"id": "",
"belong_to_id": "",
"name": "😦 rename view",
"desc": "",
"view_type": "Blank",
"version": 0,
"belongings": [],
"modified_time": 123,
"create_time": 0
}
],
"version": 0,
"modified_time": 0,
"create_time": 0
}
],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_delete_view() {
let (mut folder, initial_operations, view) = test_view_folder();
let operations = folder
.delete_view(&view.app_id, &view.id)
.unwrap()
.unwrap()
.operations;
let new_folder = make_folder_from_operations(initial_operations, vec![operations]);
assert_folder_equal(
&folder,
&new_folder,
r#"{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [
{
"id": "",
"workspace_id": "1",
"name": "😁 my first app",
"desc": "",
"belongings": [],
"version": 0,
"modified_time": 0,
"create_time": 0
}
],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_add_trash() {
let (folder, initial_operations, _trash) = test_trash();
assert_folder_equal(
&folder,
&make_folder_from_operations(initial_operations, vec![]),
r#"{
"workspaces": [],
"trash": [
{
"id": "1",
"name": "🚽 my first trash",
"modified_time": 0,
"create_time": 0,
"ty": 0
}
]
}
"#,
);
}
#[test]
fn folder_delete_trash() {
let (mut folder, initial_operations, trash) = test_trash();
let operations = folder
.delete_trash(Some(vec![trash.id]))
.unwrap()
.unwrap()
.operations;
assert_folder_equal(
&folder,
&make_folder_from_operations(initial_operations, vec![operations]),
r#"{
"workspaces": [],
"trash": []
}
"#,
);
}
fn test_folder() -> (FolderPad, FolderOperations, WorkspaceRevision) {
let folder_rev = FolderRevision::default();
let folder_json = serde_json::to_string(&folder_rev).unwrap();
let mut operations = FolderOperationsBuilder::new().insert(&folder_json).build();
let mut workspace_rev = WorkspaceRevision::default();
workspace_rev.name = "😁 my first workspace".to_owned();
workspace_rev.id = "1".to_owned();
let mut folder = FolderPad::from_folder_rev(folder_rev).unwrap();
operations = operations
.compose(
&folder
.create_workspace(workspace_rev.clone())
.unwrap()
.unwrap()
.operations,
)
.unwrap();
(folder, operations, workspace_rev)
}
fn test_app_folder() -> (FolderPad, FolderOperations, AppRevision) {
let (mut folder_rev, mut initial_operations, workspace) = test_folder();
let mut app_rev = AppRevision::default();
app_rev.workspace_id = workspace.id;
app_rev.name = "😁 my first app".to_owned();
initial_operations = initial_operations
.compose(
&folder_rev
.create_app(app_rev.clone())
.unwrap()
.unwrap()
.operations,
)
.unwrap();
(folder_rev, initial_operations, app_rev)
}
fn test_view_folder() -> (FolderPad, FolderOperations, ViewRevision) {
let (mut folder, mut initial_operations, app) = test_app_folder();
let mut view_rev = ViewRevision::default();
view_rev.app_id = app.id.clone();
view_rev.name = "🎃 my first view".to_owned();
initial_operations = initial_operations
.compose(
&folder
.create_view(view_rev.clone())
.unwrap()
.unwrap()
.operations,
)
.unwrap();
(folder, initial_operations, view_rev)
}
fn test_trash() -> (FolderPad, FolderOperations, TrashRevision) {
let folder_rev = FolderRevision::default();
let folder_json = serde_json::to_string(&folder_rev).unwrap();
let mut operations = FolderOperationsBuilder::new().insert(&folder_json).build();
let mut trash_rev = TrashRevision::default();
trash_rev.name = "🚽 my first trash".to_owned();
trash_rev.id = "1".to_owned();
let mut folder = FolderPad::from_folder_rev(folder_rev).unwrap();
operations = operations
.compose(
&folder
.create_trash(vec![trash_rev.clone().into()])
.unwrap()
.unwrap()
.operations,
)
.unwrap();
(folder, operations, trash_rev)
}
fn make_folder_from_operations(
mut initial_operation: FolderOperations,
operations: Vec<FolderOperations>,
) -> FolderPad {
for operation in operations {
initial_operation = initial_operation.compose(&operation).unwrap();
}
FolderPad::from_operations(initial_operation).unwrap()
}
fn assert_folder_equal(old: &FolderPad, new: &FolderPad, expected: &str) {
assert_eq!(old, new);
let json1 = old.to_json().unwrap();
let json2 = new.to_json().unwrap();
// format the json str
let folder_rev: FolderRevision = serde_json::from_str(expected).unwrap();
let expected = serde_json::to_string(&folder_rev).unwrap();
assert_eq!(json1, expected);
assert_eq!(json1, json2);
}
}

View File

@ -1,11 +0,0 @@
mod builder;
mod folder_node;
mod folder_pad;
mod trash_node;
mod util;
mod workspace_node;
pub use folder_node::*;
pub use folder_pad::*;
pub use workspace_node::*;

View File

@ -1,20 +0,0 @@
use crate::client_folder::util::*;
use crate::client_folder::AtomicNodeTree;
use flowy_derive::Node;
use lib_ot::core::*;
use std::sync::Arc;
#[derive(Clone, Node)]
#[node_type = "trash"]
pub struct TrashNode {
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub id: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub name: String,
}

View File

@ -1,72 +0,0 @@
use crate::client_folder::AtomicNodeTree;
use crate::errors::SyncResult;
use lib_ot::core::{AttributeHashMap, AttributeValue, Changeset, NodeId, NodeOperation};
use std::sync::Arc;
pub fn get_attributes_str_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
) -> Option<String> {
tree
.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
.and_then(|value| value.str_value())
}
pub fn set_attributes_str_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
value: String,
) -> SyncResult<()> {
let old_attributes = match get_attributes(tree.clone(), node_id) {
None => AttributeHashMap::new(),
Some(attributes) => attributes,
};
let mut new_attributes = old_attributes.clone();
new_attributes.insert(key, value);
let path = tree.read().path_from_node_id(*node_id);
let update_operation = NodeOperation::Update {
path,
changeset: Changeset::Attributes {
new: new_attributes,
old: old_attributes,
},
};
tree.write().apply_op(update_operation)?;
Ok(())
}
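// These two helpers back the #[derive(Node)] field attributes used by
// TrashNode, WorkspaceNode, and AppNode: `get_value_with` reads the value out
// of the node's attribute map, while `set_value_with` records the write as a
// Changeset::Attributes operation so every mutation flows through the shared
// AtomicNodeTree.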
#[allow(dead_code)]
pub fn get_attributes_int_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
) -> Option<i64> {
tree
.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
.and_then(|value| value.int_value())
}
pub fn get_attributes(tree: Arc<AtomicNodeTree>, node_id: &NodeId) -> Option<AttributeHashMap> {
tree
.read()
.get_node(*node_id)
.map(|node| node.attributes.clone())
}
#[allow(dead_code)]
pub fn get_attributes_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
) -> Option<AttributeValue> {
tree
.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
}

View File

@ -1,61 +0,0 @@
use crate::client_folder::util::*;
use crate::client_folder::AtomicNodeTree;
use flowy_derive::Node;
use lib_ot::core::*;
use std::sync::Arc;
#[derive(Clone, Node)]
#[node_type = "workspace"]
pub struct WorkspaceNode {
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub id: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub name: String,
#[node(child_name = "app")]
pub apps: Vec<AppNode>,
}
impl WorkspaceNode {
pub fn new(tree: Arc<AtomicNodeTree>, id: String, name: String) -> Self {
Self {
tree,
node_id: None,
id,
name,
apps: vec![],
}
}
}
#[derive(Clone, Node)]
#[node_type = "app"]
pub struct AppNode {
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub id: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub name: String,
}
impl AppNode {
pub fn new(tree: Arc<AtomicNodeTree>, id: String, name: String) -> Self {
Self {
tree,
node_id: None,
id,
name,
}
}
}
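// Construction sketch, mirroring the FolderNodePadTest script later in this
// commit: nodes share a single AtomicNodeTree and carry node_id = None until
// they are inserted into the tree. The add_app / get_mut_app / remove_app
// helpers the tests call are assumed here to be generated by #[derive(Node)]
// from the `child_name = "app"` attribute.
//
//   let workspace = WorkspaceNode::new(tree.clone(), "1".to_string(), "name".to_string());
//   let app = AppNode::new(tree.clone(), "a".to_string(), "my first app".to_string());
//   workspace.add_app(app)?;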

View File

@ -1,10 +0,0 @@
pub mod client_database;
pub mod client_document;
pub mod client_folder;
pub mod errors {
pub use flowy_sync::errors::*;
}
pub mod util;
pub use flowy_sync::util::*;
pub use lib_ot::text_delta::DeltaTextOperations;

View File

@ -1,129 +0,0 @@
use crate::errors::SyncError;
use dissimilar::Chunk;
use document_model::document::DocumentInfo;
use lib_ot::core::{DeltaOperationBuilder, OTString, OperationAttributes};
use lib_ot::{
core::{DeltaOperations, OperationTransform, NEW_LINE, WHITESPACE},
text_delta::DeltaTextOperations,
};
use revision_model::Revision;
use serde::de::DeserializeOwned;
#[inline]
pub fn find_newline(s: &str) -> Option<usize> {
s.find(NEW_LINE)
}
#[inline]
pub fn is_newline(s: &str) -> bool {
s == NEW_LINE
}
#[inline]
pub fn is_whitespace(s: &str) -> bool {
s == WHITESPACE
}
#[inline]
pub fn contain_newline(s: &str) -> bool {
s.contains(NEW_LINE)
}
pub fn recover_operation_from_revisions<T>(
revisions: Vec<Revision>,
validator: impl Fn(&DeltaOperations<T>) -> bool,
) -> Option<(DeltaOperations<T>, i64)>
where
T: OperationAttributes + DeserializeOwned,
{
let mut new_operations = DeltaOperations::<T>::new();
let mut rev_id = 0;
for revision in revisions {
if let Ok(operations) = DeltaOperations::<T>::from_bytes(revision.bytes) {
match new_operations.compose(&operations) {
Ok(composed_operations) => {
if validator(&composed_operations) {
rev_id = revision.rev_id;
new_operations = composed_operations;
} else {
break;
}
},
Err(_) => break,
}
} else {
break;
}
}
if new_operations.is_empty() {
None
} else {
Some((new_operations, rev_id))
}
}
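// Usage sketch (hedged: Revision construction lives in revision-model and is
// elided here). Revisions are composed in order; composition stops at the
// first revision that fails to deserialize, fails to compose, or fails the
// caller-supplied validator, and the last good (operations, rev_id) pair is
// returned, or None if nothing composed.
//
//   let recovered = recover_operation_from_revisions::<EmptyAttributes>(
//     revisions,
//     |ops| !ops.is_empty(), // reject a revision that empties the document
//   );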
#[inline]
pub fn make_document_info_from_revisions(
doc_id: &str,
revisions: Vec<Revision>,
) -> Result<Option<DocumentInfo>, SyncError> {
if revisions.is_empty() {
return Ok(None);
}
let mut delta = DeltaTextOperations::new();
let mut base_rev_id = 0;
let mut rev_id = 0;
for revision in revisions {
base_rev_id = revision.base_rev_id;
rev_id = revision.rev_id;
if revision.bytes.is_empty() {
tracing::warn!("revision delta_data is empty");
}
let new_delta = DeltaTextOperations::from_bytes(revision.bytes)?;
delta = delta.compose(&new_delta)?;
}
Ok(Some(DocumentInfo {
doc_id: doc_id.to_owned(),
data: delta.json_bytes().to_vec(),
rev_id,
base_rev_id,
}))
}
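// Note: revisions are folded left-to-right with compose, so base_rev_id and
// rev_id end up reflecting the last revision in the list, and the returned
// DocumentInfo.data is the JSON encoding of the fully composed delta.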
#[inline]
pub fn rev_id_from_str(s: &str) -> Result<i64, SyncError> {
let rev_id = s
.to_owned()
.parse::<i64>()
.map_err(|e| SyncError::internal().context(format!("Parse rev_id from {} failed. {}", s, e)))?;
Ok(rev_id)
}
pub fn cal_diff<T: OperationAttributes>(old: String, new: String) -> Option<DeltaOperations<T>> {
let chunks = dissimilar::diff(&old, &new);
let mut delta_builder = DeltaOperationBuilder::<T>::new();
for chunk in &chunks {
match chunk {
Chunk::Equal(s) => {
delta_builder = delta_builder.retain(OTString::from(*s).utf16_len());
},
Chunk::Delete(s) => {
delta_builder = delta_builder.delete(OTString::from(*s).utf16_len());
},
Chunk::Insert(s) => {
delta_builder = delta_builder.insert(s);
},
}
}
let delta = delta_builder.build();
if delta.is_empty() {
None
} else {
Some(delta)
}
}
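// A minimal, runnable sketch of how cal_diff behaves (assumption:
// EmptyAttributes is exported from lib_ot::core, matching the FolderPad call
// sites that use cal_diff::<EmptyAttributes>). Equal chunks become retains,
// deleted chunks become deletes, inserted chunks become inserts, all measured
// in UTF-16 code units; an empty delta yields None.
#[cfg(test)]
mod cal_diff_tests {
  use super::cal_diff;
  use lib_ot::core::EmptyAttributes;

  #[test]
  fn diff_produces_a_delta_when_inputs_differ() {
    // Pure insertion: a single Insert chunk.
    assert!(cal_diff::<EmptyAttributes>("".to_string(), "abc".to_string()).is_some());
    // One character replaced: Equal("ab") + Delete("c") + Insert("d").
    assert!(cal_diff::<EmptyAttributes>("abc".to_string(), "abd".to_string()).is_some());
  }
}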

View File

@ -1,75 +0,0 @@
use flowy_client_sync::client_folder::{FolderNodePad, WorkspaceNode};
#[test]
fn client_folder_create_default_folder_test() {
let folder_pad = FolderNodePad::new();
let json = folder_pad.to_json(false).unwrap();
assert_eq!(
json,
r#"{"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}"#
);
}
#[test]
fn client_folder_create_default_folder_with_workspace_test() {
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
let json = folder_pad.to_json(false).unwrap();
assert_eq!(
json,
r#"{"type":"folder","children":[{"type":"workspaces","children":[{"type":"workspace","attributes":{"id":"1","name":"workspace name"}}]},{"type":"trash"}]}"#
);
assert_eq!(
folder_pad.get_workspace("1").unwrap().get_name().unwrap(),
"workspace name"
);
}
#[test]
fn client_folder_delete_workspace_test() {
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
folder_pad.workspaces.remove_workspace("1");
let json = folder_pad.to_json(false).unwrap();
assert_eq!(
json,
r#"{"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}"#
);
}
#[test]
fn client_folder_update_workspace_name_test() {
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
folder_pad
.workspaces
.get_mut_workspace("1")
.unwrap()
.set_name("my first workspace".to_string());
assert_eq!(
folder_pad
.workspaces
.get_workspace("1")
.unwrap()
.get_name()
.unwrap(),
"my first workspace"
);
}

View File

@ -1,3 +0,0 @@
mod folder_test;
mod script;
mod workspace_test;

View File

@ -1,117 +0,0 @@
use flowy_client_sync::client_folder::{AppNode, FolderNodePad, WorkspaceNode};
use folder_model::AppRevision;
use lib_ot::core::Path;
pub enum FolderNodePadScript {
CreateWorkspace {
id: String,
name: String,
},
DeleteWorkspace {
id: String,
},
AssertPathOfWorkspace {
id: String,
expected_path: Path,
},
AssertNumberOfWorkspace {
expected: usize,
},
CreateApp {
id: String,
name: String,
},
DeleteApp {
id: String,
},
UpdateApp {
id: String,
name: String,
},
AssertApp {
id: String,
expected: Option<AppRevision>,
},
AssertAppContent {
id: String,
name: String,
},
// AssertNumberOfApps { expected: usize },
}
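// Each variant is one declarative test step; run_script below pattern-matches
// on the variant and performs the corresponding mutation or assertion against
// the shared FolderNodePad.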
pub struct FolderNodePadTest {
folder_pad: FolderNodePad,
}
impl FolderNodePadTest {
pub fn new() -> FolderNodePadTest {
let mut folder_pad = FolderNodePad::default();
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
Self { folder_pad }
}
pub fn run_scripts(&mut self, scripts: Vec<FolderNodePadScript>) {
for script in scripts {
self.run_script(script);
}
}
pub fn run_script(&mut self, script: FolderNodePadScript) {
match script {
FolderNodePadScript::CreateWorkspace { id, name } => {
let workspace = WorkspaceNode::new(self.folder_pad.tree.clone(), id, name);
self.folder_pad.workspaces.add_workspace(workspace).unwrap();
},
FolderNodePadScript::DeleteWorkspace { id } => {
self.folder_pad.workspaces.remove_workspace(id);
},
FolderNodePadScript::AssertPathOfWorkspace { id, expected_path } => {
let workspace_node: &WorkspaceNode = self.folder_pad.workspaces.get_workspace(id).unwrap();
let node_id = workspace_node.node_id.unwrap();
let path = self.folder_pad.tree.read().path_from_node_id(node_id);
assert_eq!(path, expected_path);
},
FolderNodePadScript::AssertNumberOfWorkspace { expected } => {
assert_eq!(self.folder_pad.workspaces.len(), expected);
},
FolderNodePadScript::CreateApp { id, name } => {
let app_node = AppNode::new(self.folder_pad.tree.clone(), id, name);
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.add_app(app_node).unwrap();
},
FolderNodePadScript::DeleteApp { id } => {
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.remove_app(&id);
},
FolderNodePadScript::UpdateApp { id, name } => {
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.get_mut_app(&id).unwrap().set_name(name);
},
FolderNodePadScript::AssertApp { id, expected } => {
let workspace_node = self.folder_pad.get_workspace("1").unwrap();
let app = workspace_node.get_app(&id);
match expected {
None => assert!(app.is_none()),
Some(expected_app) => {
let app_node = app.unwrap();
assert_eq!(expected_app.name, app_node.get_name().unwrap());
assert_eq!(expected_app.id, app_node.get_id().unwrap());
},
}
},
FolderNodePadScript::AssertAppContent { id, name } => {
let workspace_node = self.folder_pad.get_workspace("1").unwrap();
let app = workspace_node.get_app(&id).unwrap();
assert_eq!(app.get_name().unwrap(), name)
}, // FolderNodePadScript::AssertNumberOfApps { expected } => {
// let workspace_node = self.folder_pad.get_workspace("1").unwrap();
// assert_eq!(workspace_node.apps.len(), expected);
// }
}
}
}

View File

@ -1,90 +0,0 @@
use crate::client_folder::script::FolderNodePadScript::*;
use crate::client_folder::script::FolderNodePadTest;
#[test]
fn client_folder_create_multi_workspaces_test() {
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
AssertPathOfWorkspace {
id: "1".to_string(),
expected_path: vec![0, 0, 0].into(),
},
CreateWorkspace {
id: "a".to_string(),
name: "workspace a".to_string(),
},
AssertPathOfWorkspace {
id: "a".to_string(),
expected_path: vec![0, 0, 1].into(),
},
CreateWorkspace {
id: "b".to_string(),
name: "workspace b".to_string(),
},
AssertPathOfWorkspace {
id: "b".to_string(),
expected_path: vec![0, 0, 2].into(),
},
AssertNumberOfWorkspace { expected: 3 },
// The path of the workspace 'b' will be changed after deleting the 'a' workspace.
DeleteWorkspace {
id: "a".to_string(),
},
AssertPathOfWorkspace {
id: "b".to_string(),
expected_path: vec![0, 0, 1].into(),
},
]);
}
#[test]
fn client_folder_create_app_test() {
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
CreateApp {
id: "1".to_string(),
name: "my first app".to_string(),
},
AssertAppContent {
id: "1".to_string(),
name: "my first app".to_string(),
},
]);
}
#[test]
fn client_folder_delete_app_test() {
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
CreateApp {
id: "1".to_string(),
name: "my first app".to_string(),
},
DeleteApp {
id: "1".to_string(),
},
AssertApp {
id: "1".to_string(),
expected: None,
},
]);
}
#[test]
fn client_folder_update_app_test() {
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
CreateApp {
id: "1".to_string(),
name: "my first app".to_string(),
},
UpdateApp {
id: "1".to_string(),
name: "TODO".to_string(),
},
AssertAppContent {
id: "1".to_string(),
name: "TODO".to_string(),
},
]);
}

View File

@ -1 +0,0 @@
mod client_folder;

View File

@ -13,14 +13,11 @@ flowy-net = { path = "../flowy-net" }
flowy-folder2 = { path = "../flowy-folder2" }
#flowy-database = { path = "../flowy-database" }
flowy-database2 = { path = "../flowy-database2" }
database-model = { path = "../../../shared-lib/database-model" }
user-model = { path = "../../../shared-lib/user-model" }
flowy-client-ws = { path = "../../../shared-lib/flowy-client-ws" }
flowy-sqlite = { path = "../flowy-sqlite", optional = true }
flowy-document = { path = "../flowy-document" }
#flowy-document = { path = "../flowy-document" }
flowy-document2 = { path = "../flowy-document2" }
flowy-revision = { path = "../flowy-revision" }
flowy-error = { path = "../flowy-error", features = ["adaptor_ws"] }
#flowy-revision = { path = "../flowy-revision" }
flowy-error = { path = "../flowy-error" }
flowy-task = { path = "../flowy-task" }
appflowy-integrate = { version = "0.1.0" }
@ -31,8 +28,6 @@ tokio = { version = "1.26", features = ["full"] }
console-subscriber = { version = "0.1.8", optional = true }
parking_lot = "0.12.1"
revision-model = { path = "../../../shared-lib/revision-model" }
ws-model = { path = "../../../shared-lib/ws-model" }
lib-ws = { path = "../../../shared-lib/lib-ws" }
lib-infra = { path = "../../../shared-lib/lib-infra" }
serde = "1.0"
@ -49,7 +44,6 @@ dart = [
"flowy-net/dart",
"flowy-folder2/dart",
"flowy-database2/dart",
"flowy-document/dart",
"flowy-document2/dart",
]
ts = [
@ -57,12 +51,10 @@ ts = [
"flowy-net/ts",
"flowy-folder2/ts",
"flowy-database2/ts",
"flowy-document/ts",
"flowy-document2/ts",
]
rev-sqlite = [
"flowy-sqlite",
"flowy-user/rev-sqlite",
"flowy-document/rev-sqlite",
]
openssl_vendored = ["flowy-sqlite/openssl_vendored"]

View File

@ -4,7 +4,6 @@ use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
use appflowy_integrate::RocksCollabDB;
use tokio::sync::RwLock;
use flowy_client_ws::FlowyWebSocketConnect;
use flowy_database2::{DatabaseManager2, DatabaseUser2};
use flowy_error::FlowyError;
use flowy_task::TaskDispatcher;
@ -14,7 +13,6 @@ pub struct Database2DepsResolver();
impl Database2DepsResolver {
pub async fn resolve(
_ws_conn: Arc<FlowyWebSocketConnect>,
user_session: Arc<UserSession>,
task_scheduler: Arc<RwLock<TaskDispatcher>>,
collab_builder: Arc<AppFlowyCollabBuilder>,

View File

@ -1,117 +0,0 @@
use bytes::Bytes;
use flowy_client_ws::FlowyWebSocketConnect;
use flowy_document::{
DocumentCloudService, DocumentConfig, DocumentDatabase, DocumentManager, DocumentUser,
};
use flowy_error::FlowyError;
use flowy_net::ClientServerConfiguration;
use flowy_net::{http_server::document::DocumentCloudServiceImpl, local_server::LocalServer};
use flowy_revision::{RevisionWebSocket, WSStateReceiver};
use flowy_sqlite::ConnectionPool;
use flowy_user::services::UserSession;
use futures_core::future::BoxFuture;
use lib_infra::future::BoxResultFuture;
use lib_ws::{WSChannel, WSMessageReceiver, WebSocketRawMessage};
use std::{convert::TryInto, path::Path, sync::Arc};
use ws_model::ws_revision::ClientRevisionWSData;
pub struct DocumentDepsResolver();
impl DocumentDepsResolver {
pub fn resolve(
local_server: Option<Arc<LocalServer>>,
ws_conn: Arc<FlowyWebSocketConnect>,
user_session: Arc<UserSession>,
server_config: &ClientServerConfiguration,
document_config: &DocumentConfig,
) -> Arc<DocumentManager> {
let user = Arc::new(BlockUserImpl(user_session.clone()));
let rev_web_socket = Arc::new(DocumentRevisionWebSocket(ws_conn.clone()));
let cloud_service: Arc<dyn DocumentCloudService> = match local_server {
None => Arc::new(DocumentCloudServiceImpl::new(server_config.clone())),
Some(local_server) => local_server,
};
let database = Arc::new(DocumentDatabaseImpl(user_session));
let manager = Arc::new(DocumentManager::new(
cloud_service,
user,
database,
rev_web_socket,
document_config.clone(),
));
let receiver = Arc::new(DocumentWSMessageReceiverImpl(manager.clone()));
ws_conn.add_ws_message_receiver(receiver).unwrap();
manager
}
}
struct BlockUserImpl(Arc<UserSession>);
impl DocumentUser for BlockUserImpl {
fn user_dir(&self) -> Result<String, FlowyError> {
let dir = self
.0
.user_dir()
.map_err(|e| FlowyError::unauthorized().context(e))?;
let doc_dir = format!("{}/document", dir);
if !Path::new(&doc_dir).exists() {
std::fs::create_dir_all(&doc_dir)?;
}
Ok(doc_dir)
}
fn user_id(&self) -> Result<i64, FlowyError> {
self.0.user_id()
}
fn token(&self) -> Result<String, FlowyError> {
self.0.token()
}
}
struct DocumentDatabaseImpl(Arc<UserSession>);
impl DocumentDatabase for DocumentDatabaseImpl {
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self.0.db_pool()
}
}
struct DocumentRevisionWebSocket(Arc<FlowyWebSocketConnect>);
impl RevisionWebSocket for DocumentRevisionWebSocket {
fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
let bytes: Bytes = data.try_into().unwrap();
let _msg = WebSocketRawMessage {
channel: WSChannel::Document,
data: bytes.to_vec(),
};
let _ws_conn = self.0.clone();
Box::pin(async move {
// match ws_conn.web_socket().await? {
// None => {},
// Some(sender) => {
// sender.send(msg).map_err(internal_error)?;
// },
// }
Ok(())
})
}
fn subscribe_state_changed(&self) -> BoxFuture<WSStateReceiver> {
let ws_conn = self.0.clone();
Box::pin(async move { ws_conn.subscribe_websocket_state().await })
}
}
struct DocumentWSMessageReceiverImpl(Arc<DocumentManager>);
impl WSMessageReceiver for DocumentWSMessageReceiverImpl {
fn source(&self) -> WSChannel {
WSChannel::Document
}
fn receive_message(&self, msg: WebSocketRawMessage) {
let handler = self.0.clone();
tokio::spawn(async move {
handler.receive_ws_data(Bytes::from(msg.data)).await;
});
}
}

View File

@ -1,13 +1,11 @@
pub use database_deps::*;
pub use document2_deps::*;
pub use folder2_deps::*;
pub use user_deps::*;
mod document2_deps;
mod document_deps;
mod folder2_deps;
mod user_deps;
mod util;
pub use document2_deps::*;
pub use document_deps::*;
pub use folder2_deps::*;
pub use user_deps::*;
mod database_deps;

View File

@ -1,9 +1,10 @@
use flowy_net::ClientServerConfiguration;
use flowy_net::{http_server::user::UserHttpCloudService, local_server::LocalServer};
use flowy_user::event_map::UserCloudService;
use std::sync::Arc;
use flowy_net::http_server::self_host::configuration::ClientServerConfiguration;
use flowy_net::http_server::self_host::user::UserHttpCloudService;
use flowy_net::local_server::LocalServer;
use flowy_user::event_map::UserCloudService;
pub struct UserDepsResolver();
impl UserDepsResolver {
pub fn resolve(

View File

@ -10,20 +10,17 @@ use std::{
use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
use appflowy_integrate::config::{AWSDynamoDBConfig, AppFlowyCollabConfig};
use tokio::sync::{broadcast, RwLock};
use tokio::sync::RwLock;
use flowy_client_ws::{listen_on_websocket, FlowyWebSocketConnect, NetworkType};
use flowy_database2::DatabaseManager2;
use flowy_document::entities::DocumentVersionPB;
use flowy_document::{DocumentConfig, DocumentManager};
use flowy_document2::manager::DocumentManager as DocumentManager2;
use flowy_error::FlowyResult;
use flowy_folder2::manager::Folder2Manager;
pub use flowy_net::get_client_server_configuration;
use flowy_net::http_server::self_host::configuration::ClientServerConfiguration;
use flowy_net::local_server::LocalServer;
use flowy_net::ClientServerConfiguration;
use flowy_sqlite::kv::KV;
use flowy_task::{TaskDispatcher, TaskRunner};
use flowy_user::entities::UserProfile;
use flowy_user::event_map::UserStatusCallback;
use flowy_user::services::{UserSession, UserSessionConfig};
use lib_dispatch::prelude::*;
@ -31,7 +28,6 @@ use lib_dispatch::runtime::tokio_default_runtime;
use lib_infra::future::{to_fut, Fut};
use module::make_plugins;
pub use module::*;
use user_model::UserProfile;
use crate::deps_resolve::*;
@ -52,7 +48,6 @@ pub struct AppFlowyCoreConfig {
storage_path: String,
log_filter: String,
server_config: ClientServerConfiguration,
pub document: DocumentConfig,
}
impl fmt::Debug for AppFlowyCoreConfig {
@ -60,7 +55,6 @@ impl fmt::Debug for AppFlowyCoreConfig {
f.debug_struct("AppFlowyCoreConfig")
.field("storage_path", &self.storage_path)
.field("server-config", &self.server_config)
.field("document-config", &self.document)
.finish()
}
}
@ -72,15 +66,9 @@ impl AppFlowyCoreConfig {
storage_path: root.to_owned(),
log_filter: create_log_filter("info".to_owned(), vec![]),
server_config,
document: DocumentConfig::default(),
}
}
pub fn with_document_version(mut self, version: DocumentVersionPB) -> Self {
self.document.version = version;
self
}
pub fn log_filter(mut self, level: &str, with_crates: Vec<String>) -> Self {
self.log_filter = create_log_filter(level.to_owned(), with_crates);
self
@ -94,26 +82,19 @@ fn create_log_filter(level: String, with_crates: Vec<String>) -> String {
.map(|crate_name| format!("{}={}", crate_name, level))
.collect::<Vec<String>>();
filters.push(format!("flowy_core={}", level));
filters.push(format!("flowy_folder={}", level));
filters.push(format!("flowy_folder2={}", level));
filters.push(format!("collab_folder={}", level));
// filters.push(format!("collab_persistence={}", level));
filters.push(format!("collab_database={}", level));
filters.push(format!("collab_plugins={}", level));
filters.push(format!("appflowy_integrate={}", level));
filters.push(format!("collab={}", level));
filters.push(format!("flowy_user={}", level));
filters.push(format!("flowy_document={}", level));
filters.push(format!("flowy_document2={}", level));
filters.push(format!("flowy_database={}", level));
filters.push(format!("flowy_database2={}", level));
filters.push(format!("flowy_sync={}", "info"));
filters.push(format!("flowy_client_sync={}", "info"));
filters.push(format!("flowy_notification={}", "info"));
filters.push(format!("lib_ot={}", level));
filters.push(format!("lib_ws={}", level));
filters.push(format!("lib_infra={}", level));
filters.push(format!("flowy_sync={}", level));
filters.push(format!("flowy_revision={}", level));
filters.push(format!("flowy_revision_persistence={}", level));
filters.push(format!("flowy_task={}", level));
// filters.push(format!("lib_dispatch={}", level));
@ -134,13 +115,11 @@ pub struct AppFlowyCore {
#[allow(dead_code)]
pub config: AppFlowyCoreConfig,
pub user_session: Arc<UserSession>,
pub document_manager: Arc<DocumentManager>,
pub document_manager2: Arc<DocumentManager2>,
pub folder_manager: Arc<Folder2Manager>,
// pub database_manager: Arc<DatabaseManager>,
pub database_manager: Arc<DatabaseManager2>,
pub event_dispatcher: Arc<AFPluginDispatcher>,
pub ws_conn: Arc<FlowyWebSocketConnect>,
pub local_server: Option<Arc<LocalServer>>,
pub task_dispatcher: Arc<RwLock<TaskDispatcher>>,
}
@ -162,65 +141,44 @@ impl AppFlowyCore {
let task_dispatcher = Arc::new(RwLock::new(task_scheduler));
runtime.spawn(TaskRunner::run(task_dispatcher.clone()));
let (local_server, ws_conn) = mk_local_server(&config.server_config);
let (
user_session,
document_manager,
folder_manager,
local_server,
database_manager,
document_manager2,
) = runtime.block_on(async {
let user_session = mk_user_session(&config, &local_server, &config.server_config);
let local_server = mk_local_server(&config.server_config);
let (user_session, folder_manager, local_server, database_manager, document_manager2) = runtime
.block_on(async {
let user_session = mk_user_session(&config, &local_server, &config.server_config);
let document_manager = DocumentDepsResolver::resolve(
local_server.clone(),
ws_conn.clone(),
user_session.clone(),
&config.server_config,
&config.document,
);
let database_manager2 = Database2DepsResolver::resolve(
ws_conn.clone(),
user_session.clone(),
task_dispatcher.clone(),
collab_builder.clone(),
)
.await;
let database_manager2 = Database2DepsResolver::resolve(
user_session.clone(),
task_dispatcher.clone(),
collab_builder.clone(),
)
.await;
let document_manager2 = Document2DepsResolver::resolve(
user_session.clone(),
&database_manager2,
collab_builder.clone(),
);
let document_manager2 = Document2DepsResolver::resolve(
user_session.clone(),
&database_manager2,
collab_builder.clone(),
);
let folder_manager = Folder2DepsResolver::resolve(
user_session.clone(),
&document_manager2,
&database_manager2,
collab_builder.clone(),
)
.await;
let folder_manager = Folder2DepsResolver::resolve(
user_session.clone(),
&document_manager2,
&database_manager2,
collab_builder.clone(),
)
.await;
if let Some(local_server) = local_server.as_ref() {
local_server.run();
}
ws_conn.init().await;
(
user_session,
document_manager,
folder_manager,
local_server,
database_manager2,
document_manager2,
)
});
(
user_session,
folder_manager,
local_server,
database_manager2,
document_manager2,
)
});
let user_status_listener = UserStatusListener {
document_manager: document_manager.clone(),
folder_manager: folder_manager.clone(),
database_manager: database_manager.clone(),
ws_conn: ws_conn.clone(),
config: config.clone(),
};
let user_status_callback = UserStatusCallbackImpl {
@ -233,25 +191,20 @@ impl AppFlowyCore {
let event_dispatcher = Arc::new(AFPluginDispatcher::construct(runtime, || {
make_plugins(
&ws_conn,
&folder_manager,
&database_manager,
&user_session,
&document_manager,
&document_manager2,
)
}));
_start_listening(&event_dispatcher, &ws_conn, &folder_manager);
Self {
config,
user_session,
document_manager,
document_manager2,
folder_manager,
database_manager,
event_dispatcher,
ws_conn,
local_server,
task_dispatcher,
}
@ -262,43 +215,15 @@ impl AppFlowyCore {
}
}
fn _start_listening(
event_dispatcher: &AFPluginDispatcher,
ws_conn: &Arc<FlowyWebSocketConnect>,
folder_manager: &Arc<Folder2Manager>,
) {
let subscribe_network_type = ws_conn.subscribe_network_ty();
let folder_manager = folder_manager.clone();
let _cloned_folder_manager = folder_manager;
let ws_conn = ws_conn.clone();
event_dispatcher.spawn(async move {
listen_on_websocket(ws_conn.clone());
});
event_dispatcher.spawn(async move {
_listen_network_status(subscribe_network_type).await;
});
}
fn mk_local_server(
server_config: &ClientServerConfiguration,
) -> (Option<Arc<LocalServer>>, Arc<FlowyWebSocketConnect>) {
let ws_addr = server_config.ws_addr();
fn mk_local_server(server_config: &ClientServerConfiguration) -> Option<Arc<LocalServer>> {
// let ws_addr = server_config.ws_addr();
if cfg!(feature = "http_sync") {
let ws_conn = Arc::new(FlowyWebSocketConnect::new(ws_addr));
(None, ws_conn)
// let ws_conn = Arc::new(FlowyWebSocketConnect::new(ws_addr));
None
} else {
let context = flowy_net::local_server::build_server(server_config);
let local_ws = Arc::new(context.local_ws);
let ws_conn = Arc::new(FlowyWebSocketConnect::from_local(ws_addr, local_ws));
(Some(Arc::new(context.local_server)), ws_conn)
}
}
async fn _listen_network_status(mut subscribe: broadcast::Receiver<NetworkType>) {
while let Ok(_new_type) = subscribe.recv().await {
// core.network_state_changed(new_type);
// let ws_conn = Arc::new(FlowyWebSocketConnect::from_local(ws_addr, local_ws));
Some(Arc::new(context.local_server))
}
}
@ -347,10 +272,8 @@ fn mk_user_session(
}
struct UserStatusListener {
document_manager: Arc<DocumentManager>,
folder_manager: Arc<Folder2Manager>,
database_manager: Arc<DatabaseManager2>,
ws_conn: Arc<FlowyWebSocketConnect>,
#[allow(dead_code)]
config: AppFlowyCoreConfig,
}
@ -358,12 +281,11 @@ struct UserStatusListener {
impl UserStatusListener {
async fn did_sign_in(&self, token: &str, user_id: i64) -> FlowyResult<()> {
self.folder_manager.initialize(user_id).await?;
self.document_manager.initialize(user_id).await?;
self.database_manager.initialize(user_id, token).await?;
self
.ws_conn
.start(token.to_owned(), user_id.to_owned())
.await?;
// self
// .ws_conn
// .start(token.to_owned(), user_id.to_owned())
// .await?;
Ok(())
}
@ -372,26 +294,17 @@ impl UserStatusListener {
.folder_manager
.initialize_with_new_user(user_profile.id, &user_profile.token)
.await?;
self
.document_manager
.initialize_with_new_user(user_profile.id, &user_profile.token)
.await?;
self
.database_manager
.initialize_with_new_user(user_profile.id, &user_profile.token)
.await?;
self
.ws_conn
.start(user_profile.token.clone(), user_profile.id)
.await?;
Ok(())
}
async fn did_expired(&self, _token: &str, user_id: i64) -> FlowyResult<()> {
self.folder_manager.clear(user_id).await;
self.ws_conn.stop().await;
Ok(())
}
}

View File

@ -1,33 +1,27 @@
use std::sync::Arc;
use flowy_client_ws::FlowyWebSocketConnect;
use flowy_database2::DatabaseManager2;
use flowy_document::DocumentManager;
use flowy_document2::manager::DocumentManager as DocumentManager2;
use flowy_folder2::manager::Folder2Manager;
use flowy_user::services::UserSession;
use lib_dispatch::prelude::AFPlugin;
pub fn make_plugins(
ws_conn: &Arc<FlowyWebSocketConnect>,
folder_manager: &Arc<Folder2Manager>,
database_manager: &Arc<DatabaseManager2>,
user_session: &Arc<UserSession>,
document_manager: &Arc<DocumentManager>,
document_manager2: &Arc<DocumentManager2>,
) -> Vec<AFPlugin> {
let user_plugin = flowy_user::event_map::init(user_session.clone());
let folder_plugin = flowy_folder2::event_map::init(folder_manager.clone());
let network_plugin = flowy_net::event_map::init(ws_conn.clone());
let network_plugin = flowy_net::event_map::init();
let database_plugin = flowy_database2::event_map::init(database_manager.clone());
let document_plugin = flowy_document::event_map::init(document_manager.clone());
let document_plugin2 = flowy_document2::event_map::init(document_manager2.clone());
vec![
user_plugin,
folder_plugin,
network_plugin,
database_plugin,
document_plugin,
document_plugin2,
]
}

View File

@ -1,63 +0,0 @@
[package]
name = "flowy-database"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
lib-dispatch = { path = "../lib-dispatch" }
flowy-notification = { path = "../flowy-notification" }
flowy-revision = { path = "../flowy-revision" }
flowy-revision-persistence = { path = "../flowy-revision-persistence" }
flowy-task= { path = "../flowy-task" }
flowy-error = { path = "../flowy-error", features = ["adaptor_database", "adaptor_dispatch"]}
flowy-derive = { path = "../flowy-derive" }
lib-ot = { path = "../../../shared-lib/lib-ot" }
lib-infra = { path = "../../../shared-lib/lib-infra" }
database-model = { path = "../../../shared-lib/database-model" }
flowy-client-sync = { path = "../flowy-client-sync"}
revision-model = { path = "../../../shared-lib/revision-model" }
flowy-sqlite = { path = "../flowy-sqlite", optional = true }
anyhow = "1.0"
strum = "0.21"
strum_macros = "0.21"
tracing = { version = "0.1", features = ["log"] }
protobuf = {version = "2.28.0"}
rust_decimal = "1.28.1"
rusty-money = {version = "0.4.1", features = ["iso"]}
lazy_static = "1.4.0"
chrono = "0.4.23"
nanoid = "0.4.0"
bytes = { version = "1.4" }
diesel = {version = "1.4.8", features = ["sqlite"]}
dashmap = "5"
tokio = { version = "1.26", features = ["sync"]}
rayon = "1.6.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = {version = "1.0"}
serde_repr = "0.1"
indexmap = {version = "1.9.2", features = ["serde"]}
fancy-regex = "0.10.0"
regex = "1.7.1"
url = { version = "2"}
futures = "0.3.26"
atomic_refcell = "0.1.9"
crossbeam-utils = "0.8.15"
async-stream = "0.3.4"
parking_lot = "0.12.1"
[dev-dependencies]
flowy-test = { path = "../flowy-test" }
#flowy-database = { path = "", features = ["flowy_unit_test"]}
[build-dependencies]
flowy-codegen = { path = "../flowy-codegen"}
[features]
default = ["rev-sqlite"]
rev-sqlite = ["flowy-sqlite"]
dart = ["flowy-codegen/dart", "flowy-notification/dart"]
ts = ["flowy-codegen/ts", "flowy-notification/ts"]
flowy_unit_test = ["flowy-revision/flowy_unit_test"]

View File

@ -1,8 +0,0 @@
# Check out the FlowyConfig (located in flowy_toml.rs) for more details.
proto_input = [
"src/event_map.rs",
"src/services/field/type_options",
"src/entities",
"src/notification.rs"
]
event_files = ["src/event_map.rs"]

View File

@ -1,10 +0,0 @@
fn main() {
let crate_name = env!("CARGO_PKG_NAME");
flowy_codegen::protobuf_file::gen(crate_name);
#[cfg(feature = "dart")]
flowy_codegen::dart_event::gen(crate_name);
#[cfg(feature = "ts")]
flowy_codegen::ts_event::gen(crate_name);
}

View File

@ -1,136 +0,0 @@
use crate::entities::parser::NotEmptyStr;
use database_model::{CalendarLayout, CalendarLayoutSetting};
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
#[derive(Debug, Clone, Eq, PartialEq, Default, ProtoBuf)]
pub struct CalendarLayoutSettingsPB {
#[pb(index = 1)]
pub layout_field_id: String,
#[pb(index = 2)]
pub layout_ty: CalendarLayoutPB,
#[pb(index = 3)]
pub first_day_of_week: i32,
#[pb(index = 4)]
pub show_weekends: bool,
#[pb(index = 5)]
pub show_week_numbers: bool,
}
impl std::convert::From<CalendarLayoutSettingsPB> for CalendarLayoutSetting {
fn from(pb: CalendarLayoutSettingsPB) -> Self {
CalendarLayoutSetting {
layout_ty: pb.layout_ty.into(),
first_day_of_week: pb.first_day_of_week,
show_weekends: pb.show_weekends,
show_week_numbers: pb.show_week_numbers,
layout_field_id: pb.layout_field_id,
}
}
}
impl std::convert::From<CalendarLayoutSetting> for CalendarLayoutSettingsPB {
fn from(params: CalendarLayoutSetting) -> Self {
CalendarLayoutSettingsPB {
layout_field_id: params.layout_field_id,
layout_ty: params.layout_ty.into(),
first_day_of_week: params.first_day_of_week,
show_weekends: params.show_weekends,
show_week_numbers: params.show_week_numbers,
}
}
}
#[derive(Debug, Clone, Eq, PartialEq, Default, ProtoBuf_Enum)]
#[repr(u8)]
pub enum CalendarLayoutPB {
#[default]
MonthLayout = 0,
WeekLayout = 1,
DayLayout = 2,
}
impl std::convert::From<CalendarLayoutPB> for CalendarLayout {
fn from(pb: CalendarLayoutPB) -> Self {
match pb {
CalendarLayoutPB::MonthLayout => CalendarLayout::MonthLayout,
CalendarLayoutPB::WeekLayout => CalendarLayout::WeekLayout,
CalendarLayoutPB::DayLayout => CalendarLayout::DayLayout,
}
}
}
impl std::convert::From<CalendarLayout> for CalendarLayoutPB {
fn from(layout: CalendarLayout) -> Self {
match layout {
CalendarLayout::MonthLayout => CalendarLayoutPB::MonthLayout,
CalendarLayout::WeekLayout => CalendarLayoutPB::WeekLayout,
CalendarLayout::DayLayout => CalendarLayoutPB::DayLayout,
}
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct CalendarEventRequestPB {
#[pb(index = 1)]
pub view_id: String,
// Currently, requesting the events within the specified month
// is not supported
#[pb(index = 2)]
pub month: String,
}
#[derive(Debug, Clone, Default)]
pub struct CalendarEventRequestParams {
pub view_id: String,
pub month: String,
}
impl TryInto<CalendarEventRequestParams> for CalendarEventRequestPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CalendarEventRequestParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::ViewIdIsInvalid)?;
Ok(CalendarEventRequestParams {
view_id: view_id.0,
month: self.month,
})
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct CalendarEventPB {
#[pb(index = 1)]
pub row_id: String,
#[pb(index = 2)]
pub date_field_id: String,
#[pb(index = 3)]
pub title: String,
#[pb(index = 4)]
pub timestamp: i64,
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct RepeatedCalendarEventPB {
#[pb(index = 1)]
pub items: Vec<CalendarEventPB>,
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct MoveCalendarEventPB {
#[pb(index = 1)]
pub row_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub timestamp: i64,
}

View File

@ -1,173 +0,0 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::FieldType;
use database_model::{CellRevision, RowChangeset};
use flowy_derive::ProtoBuf;
use flowy_error::ErrorCode;
use std::collections::HashMap;
#[derive(ProtoBuf, Default)]
pub struct CreateSelectOptionPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub view_id: String,
#[pb(index = 3)]
pub option_name: String,
}
pub struct CreateSelectOptionParams {
pub field_id: String,
pub view_id: String,
pub option_name: String,
}
impl TryInto<CreateSelectOptionParams> for CreateSelectOptionPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CreateSelectOptionParams, Self::Error> {
let option_name =
NotEmptyStr::parse(self.option_name).map_err(|_| ErrorCode::SelectOptionNameIsEmpty)?;
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::ViewIdIsInvalid)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(CreateSelectOptionParams {
field_id: field_id.0,
option_name: option_name.0,
view_id: view_id.0,
})
}
}
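// Validation sketch: every field is funneled through NotEmptyStr, and an empty
// string maps to a field-specific ErrorCode. For example, a payload with an
// empty option_name fails with ErrorCode::SelectOptionNameIsEmpty before any
// view or field lookup happens.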
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct CellIdPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub row_id: String,
}
/// Represents the cell identifier, used to locate a cell in the
/// corresponding view's row by its field id.
pub struct CellIdParams {
pub view_id: String,
pub field_id: String,
pub row_id: String,
}
impl TryInto<CellIdParams> for CellIdPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CellIdParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
let row_id = NotEmptyStr::parse(self.row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
Ok(CellIdParams {
view_id: view_id.0,
field_id: field_id.0,
row_id: row_id.0,
})
}
}
/// Represents the data of a cell.
#[derive(Debug, Default, ProtoBuf)]
pub struct CellPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub row_id: String,
/// The data is encoded using the helper struct `CellProtobufBlob`.
/// Check out `CellProtobufBlob` for more information.
#[pb(index = 3)]
pub data: Vec<u8>,
/// The field_type will be None if no field with field_id is found
#[pb(index = 4, one_of)]
pub field_type: Option<FieldType>,
}
impl CellPB {
pub fn new(field_id: &str, row_id: &str, field_type: FieldType, data: Vec<u8>) -> Self {
Self {
field_id: field_id.to_owned(),
row_id: row_id.to_string(),
data,
field_type: Some(field_type),
}
}
pub fn empty(field_id: &str, row_id: &str) -> Self {
Self {
field_id: field_id.to_owned(),
row_id: row_id.to_owned(),
data: vec![],
field_type: None,
}
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct RepeatedCellPB {
#[pb(index = 1)]
pub items: Vec<CellPB>,
}
impl std::ops::Deref for RepeatedCellPB {
type Target = Vec<CellPB>;
fn deref(&self) -> &Self::Target {
&self.items
}
}
impl std::ops::DerefMut for RepeatedCellPB {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.items
}
}
impl std::convert::From<Vec<CellPB>> for RepeatedCellPB {
fn from(items: Vec<CellPB>) -> Self {
Self { items }
}
}
/// Describes an update to a single cell's data.
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct CellChangesetPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub row_id: String,
#[pb(index = 3)]
pub field_id: String,
#[pb(index = 4)]
pub type_cell_data: String,
}
impl std::convert::From<CellChangesetPB> for RowChangeset {
fn from(changeset: CellChangesetPB) -> Self {
let mut cell_by_field_id = HashMap::with_capacity(1);
let field_id = changeset.field_id;
let cell_rev = CellRevision {
type_cell_data: changeset.type_cell_data,
};
cell_by_field_id.insert(field_id, cell_rev);
RowChangeset {
row_id: changeset.row_id,
height: None,
visibility: None,
cell_by_field_id,
}
}
}

View File

@ -1,205 +0,0 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{DatabaseLayoutPB, FieldIdPB, RowPB};
use flowy_derive::ProtoBuf;
use flowy_error::ErrorCode;
/// [DatabasePB] describes a database: its id, its fields, and its rows
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct DatabasePB {
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub fields: Vec<FieldIdPB>,
#[pb(index = 3)]
pub rows: Vec<RowPB>,
}
#[derive(ProtoBuf, Default)]
pub struct CreateDatabasePayloadPB {
#[pb(index = 1)]
pub name: String,
}
#[derive(Clone, ProtoBuf, Default, Debug)]
pub struct DatabaseViewIdPB {
#[pb(index = 1)]
pub value: String,
}
impl AsRef<str> for DatabaseViewIdPB {
fn as_ref(&self) -> &str {
&self.value
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct MoveFieldPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub from_index: i32,
#[pb(index = 4)]
pub to_index: i32,
}
#[derive(Clone)]
pub struct MoveFieldParams {
pub view_id: String,
pub field_id: String,
pub from_index: i32,
pub to_index: i32,
}
impl TryInto<MoveFieldParams> for MoveFieldPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<MoveFieldParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let item_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::InvalidData)?;
Ok(MoveFieldParams {
view_id: view_id.0,
field_id: item_id.0,
from_index: self.from_index,
to_index: self.to_index,
})
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct MoveRowPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub from_row_id: String,
#[pb(index = 4)]
pub to_row_id: String,
}
pub struct MoveRowParams {
pub view_id: String,
pub from_row_id: String,
pub to_row_id: String,
}
impl TryInto<MoveRowParams> for MoveRowPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<MoveRowParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let from_row_id = NotEmptyStr::parse(self.from_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
let to_row_id = NotEmptyStr::parse(self.to_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
Ok(MoveRowParams {
view_id: view_id.0,
from_row_id: from_row_id.0,
to_row_id: to_row_id.0,
})
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct MoveGroupRowPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub from_row_id: String,
#[pb(index = 3)]
pub to_group_id: String,
#[pb(index = 4, one_of)]
pub to_row_id: Option<String>,
}
pub struct MoveGroupRowParams {
pub view_id: String,
pub from_row_id: String,
pub to_group_id: String,
pub to_row_id: Option<String>,
}
impl TryInto<MoveGroupRowParams> for MoveGroupRowPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<MoveGroupRowParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let from_row_id = NotEmptyStr::parse(self.from_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
let to_group_id =
NotEmptyStr::parse(self.to_group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
let to_row_id = match self.to_row_id {
None => None,
Some(to_row_id) => Some(
NotEmptyStr::parse(to_row_id)
.map_err(|_| ErrorCode::RowIdIsEmpty)?
.0,
),
};
Ok(MoveGroupRowParams {
view_id: view_id.0,
from_row_id: from_row_id.0,
to_group_id: to_group_id.0,
to_row_id,
})
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct DatabaseDescriptionPB {
#[pb(index = 1)]
pub name: String,
#[pb(index = 2)]
pub database_id: String,
}
#[derive(Debug, Default, ProtoBuf)]
pub struct RepeatedDatabaseDescriptionPB {
#[pb(index = 1)]
pub items: Vec<DatabaseDescriptionPB>,
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct DatabaseGroupIdPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub group_id: String,
}
pub struct DatabaseGroupIdParams {
pub view_id: String,
pub group_id: String,
}
impl TryInto<DatabaseGroupIdParams> for DatabaseGroupIdPB {
type Error = ErrorCode;
fn try_into(self) -> Result<DatabaseGroupIdParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let group_id = NotEmptyStr::parse(self.group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
Ok(DatabaseGroupIdParams {
view_id: view_id.0,
group_id: group_id.0,
})
}
}
#[derive(Clone, ProtoBuf, Default, Debug)]
pub struct DatabaseLayoutIdPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub layout: DatabaseLayoutPB,
}

View File

@ -1,671 +0,0 @@
use database_model::{FieldRevision, FieldTypeRevision};
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
use serde_repr::*;
use std::sync::Arc;
use crate::entities::parser::NotEmptyStr;
use strum_macros::{Display, EnumCount as EnumCountMacro, EnumIter, EnumString};
/// [FieldPB] defines a field's attributes, such as its name, field_type, and width.
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct FieldPB {
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub name: String,
#[pb(index = 3)]
pub desc: String,
#[pb(index = 4)]
pub field_type: FieldType,
#[pb(index = 5)]
pub frozen: bool,
#[pb(index = 6)]
pub visibility: bool,
#[pb(index = 7)]
pub width: i32,
#[pb(index = 8)]
pub is_primary: bool,
}
impl std::convert::From<FieldRevision> for FieldPB {
fn from(field_rev: FieldRevision) -> Self {
Self {
id: field_rev.id,
name: field_rev.name,
desc: field_rev.desc,
field_type: field_rev.ty.into(),
frozen: field_rev.frozen,
visibility: field_rev.visibility,
width: field_rev.width,
is_primary: field_rev.is_primary,
}
}
}
impl std::convert::From<Arc<FieldRevision>> for FieldPB {
fn from(field_rev: Arc<FieldRevision>) -> Self {
let field_rev = field_rev.as_ref().clone();
FieldPB::from(field_rev)
}
}
/// [FieldIdPB] holds the id of a [FieldPB].
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct FieldIdPB {
#[pb(index = 1)]
pub field_id: String,
}
impl std::convert::From<&str> for FieldIdPB {
fn from(s: &str) -> Self {
FieldIdPB {
field_id: s.to_owned(),
}
}
}
impl std::convert::From<String> for FieldIdPB {
fn from(s: String) -> Self {
FieldIdPB { field_id: s }
}
}
impl std::convert::From<&Arc<FieldRevision>> for FieldIdPB {
fn from(field_rev: &Arc<FieldRevision>) -> Self {
Self {
field_id: field_rev.id.clone(),
}
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct DatabaseFieldChangesetPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub inserted_fields: Vec<IndexFieldPB>,
#[pb(index = 3)]
pub deleted_fields: Vec<FieldIdPB>,
#[pb(index = 4)]
pub updated_fields: Vec<FieldPB>,
}
impl DatabaseFieldChangesetPB {
pub fn insert(database_id: &str, inserted_fields: Vec<IndexFieldPB>) -> Self {
Self {
view_id: database_id.to_owned(),
inserted_fields,
deleted_fields: vec![],
updated_fields: vec![],
}
}
pub fn delete(database_id: &str, deleted_fields: Vec<FieldIdPB>) -> Self {
Self {
view_id: database_id.to_string(),
inserted_fields: vec![],
deleted_fields,
updated_fields: vec![],
}
}
pub fn update(database_id: &str, updated_fields: Vec<FieldPB>) -> Self {
Self {
view_id: database_id.to_string(),
inserted_fields: vec![],
deleted_fields: vec![],
updated_fields,
}
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct IndexFieldPB {
#[pb(index = 1)]
pub field: FieldPB,
#[pb(index = 2)]
pub index: i32,
}
impl IndexFieldPB {
pub fn from_field_rev(field_rev: &Arc<FieldRevision>, index: usize) -> Self {
Self {
field: FieldPB::from(field_rev.as_ref().clone()),
index: index as i32,
}
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct CreateFieldPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_type: FieldType,
#[pb(index = 3, one_of)]
pub type_option_data: Option<Vec<u8>>,
}
#[derive(Clone)]
pub struct CreateFieldParams {
pub view_id: String,
pub field_type: FieldType,
pub type_option_data: Option<Vec<u8>>,
}
impl TryInto<CreateFieldParams> for CreateFieldPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CreateFieldParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
Ok(CreateFieldParams {
view_id: view_id.0,
field_type: self.field_type,
type_option_data: self.type_option_data,
})
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct UpdateFieldTypePayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
#[pb(index = 4)]
pub create_if_not_exist: bool,
}
pub struct EditFieldParams {
pub view_id: String,
pub field_id: String,
pub field_type: FieldType,
}
impl TryInto<EditFieldParams> for UpdateFieldTypePayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<EditFieldParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(EditFieldParams {
view_id: view_id.0,
field_id: field_id.0,
field_type: self.field_type,
})
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct TypeOptionPathPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
}
pub struct TypeOptionPathParams {
pub view_id: String,
pub field_id: String,
pub field_type: FieldType,
}
impl TryInto<TypeOptionPathParams> for TypeOptionPathPB {
type Error = ErrorCode;
fn try_into(self) -> Result<TypeOptionPathParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(TypeOptionPathParams {
view_id: database_id.0,
field_id: field_id.0,
field_type: self.field_type,
})
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct TypeOptionPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field: FieldPB,
#[pb(index = 3)]
pub type_option_data: Vec<u8>,
}
/// A collection of [FieldPB]s.
#[derive(Debug, Default, ProtoBuf)]
pub struct RepeatedFieldPB {
#[pb(index = 1)]
pub items: Vec<FieldPB>,
}
impl std::ops::Deref for RepeatedFieldPB {
type Target = Vec<FieldPB>;
fn deref(&self) -> &Self::Target {
&self.items
}
}
impl std::ops::DerefMut for RepeatedFieldPB {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.items
}
}
impl std::convert::From<Vec<FieldPB>> for RepeatedFieldPB {
fn from(items: Vec<FieldPB>) -> Self {
Self { items }
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct RepeatedFieldIdPB {
#[pb(index = 1)]
pub items: Vec<FieldIdPB>,
}
impl std::ops::Deref for RepeatedFieldIdPB {
type Target = Vec<FieldIdPB>;
fn deref(&self) -> &Self::Target {
&self.items
}
}
impl std::convert::From<Vec<FieldIdPB>> for RepeatedFieldIdPB {
fn from(items: Vec<FieldIdPB>) -> Self {
RepeatedFieldIdPB { items }
}
}
impl std::convert::From<String> for RepeatedFieldIdPB {
fn from(s: String) -> Self {
RepeatedFieldIdPB {
items: vec![FieldIdPB::from(s)],
}
}
}
/// [TypeOptionChangesetPB] is used to update the type-option data.
#[derive(ProtoBuf, Default)]
pub struct TypeOptionChangesetPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_id: String,
/// Check out [TypeOptionPB] for more details.
#[pb(index = 3)]
pub type_option_data: Vec<u8>,
}
#[derive(Clone)]
pub struct TypeOptionChangesetParams {
pub view_id: String,
pub field_id: String,
pub type_option_data: Vec<u8>,
}
impl TryInto<TypeOptionChangesetParams> for TypeOptionChangesetPB {
type Error = ErrorCode;
fn try_into(self) -> Result<TypeOptionChangesetParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let _ = NotEmptyStr::parse(self.field_id.clone()).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(TypeOptionChangesetParams {
view_id: view_id.0,
field_id: self.field_id,
type_option_data: self.type_option_data,
})
}
}
#[derive(ProtoBuf, Default)]
pub struct GetFieldPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2, one_of)]
pub field_ids: Option<RepeatedFieldIdPB>,
}
pub struct GetFieldParams {
pub view_id: String,
pub field_ids: Option<Vec<String>>,
}
impl TryInto<GetFieldParams> for GetFieldPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<GetFieldParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_ids = self.field_ids.map(|repeated| {
repeated
.items
.into_iter()
.map(|item| item.field_id)
.collect::<Vec<String>>()
});
Ok(GetFieldParams {
view_id: view_id.0,
field_ids,
})
}
}
/// [FieldChangesetPB] modifies a single field and defines which of its
/// properties can be changed.
///
/// Pass in None for a property you don't want to modify; pass in Some(value)
/// for one you do.
///
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct FieldChangesetPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub view_id: String,
#[pb(index = 3, one_of)]
pub name: Option<String>,
#[pb(index = 4, one_of)]
pub desc: Option<String>,
#[pb(index = 5, one_of)]
pub field_type: Option<FieldType>,
#[pb(index = 6, one_of)]
pub frozen: Option<bool>,
#[pb(index = 7, one_of)]
pub visibility: Option<bool>,
#[pb(index = 8, one_of)]
pub width: Option<i32>,
// #[pb(index = 9, one_of)]
// pub type_option_data: Option<Vec<u8>>,
}
impl TryInto<FieldChangesetParams> for FieldChangesetPB {
type Error = ErrorCode;
fn try_into(self) -> Result<FieldChangesetParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
let field_type = self.field_type.map(FieldTypeRevision::from);
// if let Some(type_option_data) = self.type_option_data.as_ref() {
// if type_option_data.is_empty() {
// return Err(ErrorCode::TypeOptionDataIsEmpty);
// }
// }
Ok(FieldChangesetParams {
field_id: field_id.0,
view_id: view_id.0,
name: self.name,
desc: self.desc,
field_type,
frozen: self.frozen,
visibility: self.visibility,
width: self.width,
// type_option_data: self.type_option_data,
})
}
}
#[derive(Debug, Clone, Default)]
pub struct FieldChangesetParams {
pub field_id: String,
pub view_id: String,
pub name: Option<String>,
pub desc: Option<String>,
pub field_type: Option<FieldTypeRevision>,
pub frozen: Option<bool>,
pub visibility: Option<bool>,
pub width: Option<i32>,
// pub type_option_data: Option<Vec<u8>>,
}
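// A minimal sketch (added for illustration, not in the original file): renaming
// a field while leaving every other property untouched. The ids are hypothetical.
fn rename_field_example() -> Result<FieldChangesetParams, ErrorCode> {
  let changeset = FieldChangesetPB {
    field_id: "field_1".to_string(),
    view_id: "view_1".to_string(),
    name: Some("Status".to_string()),
    // desc, field_type, frozen, visibility, and width stay None, i.e. unchanged.
    ..Default::default()
  };
  changeset.try_into()
}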
/// Certain field types have user-defined options such as color, date format, number format,
/// or a list of values for a multi-select list. These options are defined within a specialization
/// of the FieldTypeOption class.
///
/// You could check [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/frontend/grid#fieldtype)
/// for more information.
///
/// The order of the enum variants must not be changed. If you want to add a
/// new type, append it to the end of the list.
#[derive(
Debug,
Clone,
PartialEq,
Hash,
Eq,
ProtoBuf_Enum,
EnumCountMacro,
EnumString,
EnumIter,
Display,
Serialize_repr,
Deserialize_repr,
)]
#[repr(u8)]
pub enum FieldType {
RichText = 0,
Number = 1,
DateTime = 2,
SingleSelect = 3,
MultiSelect = 4,
Checkbox = 5,
URL = 6,
Checklist = 7,
}
pub const RICH_TEXT_FIELD: FieldType = FieldType::RichText;
pub const NUMBER_FIELD: FieldType = FieldType::Number;
pub const DATE_FIELD: FieldType = FieldType::DateTime;
pub const SINGLE_SELECT_FIELD: FieldType = FieldType::SingleSelect;
pub const MULTI_SELECT_FIELD: FieldType = FieldType::MultiSelect;
pub const CHECKBOX_FIELD: FieldType = FieldType::Checkbox;
pub const URL_FIELD: FieldType = FieldType::URL;
pub const CHECKLIST_FIELD: FieldType = FieldType::Checklist;
impl std::default::Default for FieldType {
fn default() -> Self {
FieldType::RichText
}
}
impl AsRef<FieldType> for FieldType {
fn as_ref(&self) -> &FieldType {
self
}
}
impl From<&FieldType> for FieldType {
fn from(field_type: &FieldType) -> Self {
field_type.clone()
}
}
impl FieldType {
pub fn type_id(&self) -> String {
(self.clone() as u8).to_string()
}
pub fn default_cell_width(&self) -> i32 {
match self {
FieldType::DateTime => 180,
_ => 150,
}
}
pub fn is_number(&self) -> bool {
self == &NUMBER_FIELD
}
pub fn is_text(&self) -> bool {
self == &RICH_TEXT_FIELD
}
pub fn is_checkbox(&self) -> bool {
self == &CHECKBOX_FIELD
}
pub fn is_date(&self) -> bool {
self == &DATE_FIELD
}
pub fn is_single_select(&self) -> bool {
self == &SINGLE_SELECT_FIELD
}
pub fn is_multi_select(&self) -> bool {
self == &MULTI_SELECT_FIELD
}
pub fn is_url(&self) -> bool {
self == &URL_FIELD
}
pub fn is_select_option(&self) -> bool {
self == &MULTI_SELECT_FIELD || self == &SINGLE_SELECT_FIELD
}
pub fn is_check_list(&self) -> bool {
self == &CHECKLIST_FIELD
}
pub fn can_be_group(&self) -> bool {
self.is_select_option() || self.is_checkbox() || self.is_url()
}
}
impl std::convert::From<&FieldType> for FieldTypeRevision {
fn from(ty: &FieldType) -> Self {
ty.clone() as u8
}
}
impl std::convert::From<FieldType> for FieldTypeRevision {
fn from(ty: FieldType) -> Self {
ty as u8
}
}
impl std::convert::From<&FieldTypeRevision> for FieldType {
fn from(ty: &FieldTypeRevision) -> Self {
FieldType::from(*ty)
}
}
impl std::convert::From<FieldTypeRevision> for FieldType {
fn from(ty: FieldTypeRevision) -> Self {
match ty {
0 => FieldType::RichText,
1 => FieldType::Number,
2 => FieldType::DateTime,
3 => FieldType::SingleSelect,
4 => FieldType::MultiSelect,
5 => FieldType::Checkbox,
6 => FieldType::URL,
7 => FieldType::Checklist,
_ => {
tracing::error!("Can't convert FieldTypeRevision: {} to FieldType", ty);
FieldType::RichText
},
}
}
}
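// Illustrative sketch (not part of the deleted file): FieldType and its u8
// revision value round-trip, and unknown discriminants fall back to RichText.
fn field_type_roundtrip_example() {
  let rev: FieldTypeRevision = FieldTypeRevision::from(FieldType::Checklist);
  assert_eq!(rev, 7);
  assert_eq!(FieldType::from(rev), FieldType::Checklist);
  // An out-of-range value logs an error and defaults to RichText.
  assert_eq!(FieldType::from(42 as FieldTypeRevision), FieldType::RichText);
}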
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct DuplicateFieldPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub view_id: String,
}
// #[derive(Debug, Clone, Default, ProtoBuf)]
// pub struct GridFieldIdentifierPayloadPB {
// #[pb(index = 1)]
// pub field_id: String,
//
// #[pb(index = 2)]
// pub view_id: String,
// }
impl TryInto<FieldIdParams> for DuplicateFieldPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<FieldIdParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(FieldIdParams {
view_id: view_id.0,
field_id: field_id.0,
})
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct DeleteFieldPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub view_id: String,
}
impl TryInto<FieldIdParams> for DeleteFieldPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<FieldIdParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(FieldIdParams {
view_id: view_id.0,
field_id: field_id.0,
})
}
}
pub struct FieldIdParams {
pub field_id: String,
pub view_id: String,
}

View File

@ -1,62 +0,0 @@
use crate::services::filter::FromFilterString;
use database_model::FilterRevision;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct CheckboxFilterPB {
#[pb(index = 1)]
pub condition: CheckboxFilterConditionPB,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum CheckboxFilterConditionPB {
IsChecked = 0,
IsUnChecked = 1,
}
impl std::convert::From<CheckboxFilterConditionPB> for u32 {
fn from(value: CheckboxFilterConditionPB) -> Self {
value as u32
}
}
impl std::default::Default for CheckboxFilterConditionPB {
fn default() -> Self {
CheckboxFilterConditionPB::IsChecked
}
}
impl std::convert::TryFrom<u8> for CheckboxFilterConditionPB {
type Error = ErrorCode;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(CheckboxFilterConditionPB::IsChecked),
1 => Ok(CheckboxFilterConditionPB::IsUnChecked),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for CheckboxFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
CheckboxFilterPB {
condition: CheckboxFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(CheckboxFilterConditionPB::IsChecked),
}
}
}
impl std::convert::From<&FilterRevision> for CheckboxFilterPB {
fn from(rev: &FilterRevision) -> Self {
CheckboxFilterPB {
condition: CheckboxFilterConditionPB::try_from(rev.condition)
.unwrap_or(CheckboxFilterConditionPB::IsChecked),
}
}
}

View File

@ -1,62 +0,0 @@
use crate::services::filter::FromFilterString;
use database_model::FilterRevision;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct ChecklistFilterPB {
#[pb(index = 1)]
pub condition: ChecklistFilterConditionPB,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum ChecklistFilterConditionPB {
IsComplete = 0,
IsIncomplete = 1,
}
impl std::convert::From<ChecklistFilterConditionPB> for u32 {
fn from(value: ChecklistFilterConditionPB) -> Self {
value as u32
}
}
impl std::default::Default for ChecklistFilterConditionPB {
fn default() -> Self {
ChecklistFilterConditionPB::IsIncomplete
}
}
impl std::convert::TryFrom<u8> for ChecklistFilterConditionPB {
type Error = ErrorCode;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(ChecklistFilterConditionPB::IsComplete),
1 => Ok(ChecklistFilterConditionPB::IsIncomplete),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for ChecklistFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
ChecklistFilterPB {
condition: ChecklistFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(ChecklistFilterConditionPB::IsIncomplete),
}
}
}
impl std::convert::From<&FilterRevision> for ChecklistFilterPB {
fn from(rev: &FilterRevision) -> Self {
ChecklistFilterPB {
condition: ChecklistFilterConditionPB::try_from(rev.condition)
.unwrap_or(ChecklistFilterConditionPB::IsIncomplete),
}
}
}

View File

@ -1,122 +0,0 @@
use crate::services::filter::FromFilterString;
use database_model::FilterRevision;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
use serde::{Deserialize, Serialize};
use std::str::FromStr;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct DateFilterPB {
#[pb(index = 1)]
pub condition: DateFilterConditionPB,
#[pb(index = 2, one_of)]
pub start: Option<i64>,
#[pb(index = 3, one_of)]
pub end: Option<i64>,
#[pb(index = 4, one_of)]
pub timestamp: Option<i64>,
}
#[derive(Deserialize, Serialize, Default, Clone, Debug)]
pub struct DateFilterContentPB {
pub start: Option<i64>,
pub end: Option<i64>,
pub timestamp: Option<i64>,
}
impl ToString for DateFilterContentPB {
fn to_string(&self) -> String {
serde_json::to_string(self).unwrap()
}
}
impl FromStr for DateFilterContentPB {
type Err = serde_json::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
serde_json::from_str(s)
}
}
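// A small sketch (added for illustration): the filter content round-trips
// through the JSON string produced by the ToString/FromStr pair above. The
// timestamps are hypothetical.
fn date_filter_content_example() -> Result<(), serde_json::Error> {
  let content = DateFilterContentPB {
    start: Some(1_672_531_200),
    end: None,
    timestamp: Some(1_672_617_600),
  };
  let json = content.to_string();
  let parsed = DateFilterContentPB::from_str(&json)?;
  assert_eq!(parsed.start, content.start);
  assert_eq!(parsed.end, content.end);
  Ok(())
}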
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum DateFilterConditionPB {
DateIs = 0,
DateBefore = 1,
DateAfter = 2,
DateOnOrBefore = 3,
DateOnOrAfter = 4,
DateWithIn = 5,
DateIsEmpty = 6,
DateIsNotEmpty = 7,
}
impl std::convert::From<DateFilterConditionPB> for u32 {
fn from(value: DateFilterConditionPB) -> Self {
value as u32
}
}
impl std::default::Default for DateFilterConditionPB {
fn default() -> Self {
DateFilterConditionPB::DateIs
}
}
impl std::convert::TryFrom<u8> for DateFilterConditionPB {
type Error = ErrorCode;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(DateFilterConditionPB::DateIs),
1 => Ok(DateFilterConditionPB::DateBefore),
2 => Ok(DateFilterConditionPB::DateAfter),
3 => Ok(DateFilterConditionPB::DateOnOrBefore),
4 => Ok(DateFilterConditionPB::DateOnOrAfter),
5 => Ok(DateFilterConditionPB::DateWithIn),
6 => Ok(DateFilterConditionPB::DateIsEmpty),
7 => Ok(DateFilterConditionPB::DateIsNotEmpty),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for DateFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
let condition = DateFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(DateFilterConditionPB::DateIs);
let mut filter = DateFilterPB {
condition,
..Default::default()
};
if let Ok(content) = DateFilterContentPB::from_str(&filter_rev.content) {
filter.start = content.start;
filter.end = content.end;
filter.timestamp = content.timestamp;
};
filter
}
}
impl std::convert::From<&FilterRevision> for DateFilterPB {
fn from(rev: &FilterRevision) -> Self {
let condition =
DateFilterConditionPB::try_from(rev.condition).unwrap_or(DateFilterConditionPB::DateIs);
let mut filter = DateFilterPB {
condition,
..Default::default()
};
if let Ok(content) = DateFilterContentPB::from_str(&rev.content) {
filter.start = content.start;
filter.end = content.end;
filter.timestamp = content.timestamp;
};
filter
}
}

View File

@ -1,54 +0,0 @@
use crate::entities::FilterPB;
use flowy_derive::ProtoBuf;
#[derive(Debug, Default, ProtoBuf)]
pub struct FilterChangesetNotificationPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub insert_filters: Vec<FilterPB>,
#[pb(index = 3)]
pub delete_filters: Vec<FilterPB>,
#[pb(index = 4)]
pub update_filters: Vec<UpdatedFilter>,
}
#[derive(Debug, Default, ProtoBuf)]
pub struct UpdatedFilter {
#[pb(index = 1)]
pub filter_id: String,
#[pb(index = 2, one_of)]
pub filter: Option<FilterPB>,
}
impl FilterChangesetNotificationPB {
pub fn from_insert(view_id: &str, filters: Vec<FilterPB>) -> Self {
Self {
view_id: view_id.to_string(),
insert_filters: filters,
delete_filters: Default::default(),
update_filters: Default::default(),
}
}
pub fn from_delete(view_id: &str, filters: Vec<FilterPB>) -> Self {
Self {
view_id: view_id.to_string(),
insert_filters: Default::default(),
delete_filters: filters,
update_filters: Default::default(),
}
}
pub fn from_update(view_id: &str, filters: Vec<UpdatedFilter>) -> Self {
Self {
view_id: view_id.to_string(),
insert_filters: Default::default(),
delete_filters: Default::default(),
update_filters: filters,
}
}
}
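// Illustrative only (not in the original file): each constructor fills exactly
// one of the three changeset lists and leaves the others empty.
fn filter_notification_example() {
  let notification =
    FilterChangesetNotificationPB::from_delete("view_1", vec![FilterPB::default()]);
  assert_eq!(notification.delete_filters.len(), 1);
  assert!(notification.insert_filters.is_empty());
  assert!(notification.update_filters.is_empty());
}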

View File

@ -1,17 +0,0 @@
mod checkbox_filter;
mod checklist_filter;
mod date_filter;
mod filter_changeset;
mod number_filter;
mod select_option_filter;
mod text_filter;
mod util;
pub use checkbox_filter::*;
pub use checklist_filter::*;
pub use date_filter::*;
pub use filter_changeset::*;
pub use number_filter::*;
pub use select_option_filter::*;
pub use text_filter::*;
pub use util::*;

View File

@ -1,77 +0,0 @@
use crate::services::filter::FromFilterString;
use database_model::FilterRevision;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct NumberFilterPB {
#[pb(index = 1)]
pub condition: NumberFilterConditionPB,
#[pb(index = 2)]
pub content: String,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum NumberFilterConditionPB {
Equal = 0,
NotEqual = 1,
GreaterThan = 2,
LessThan = 3,
GreaterThanOrEqualTo = 4,
LessThanOrEqualTo = 5,
NumberIsEmpty = 6,
NumberIsNotEmpty = 7,
}
impl std::default::Default for NumberFilterConditionPB {
fn default() -> Self {
NumberFilterConditionPB::Equal
}
}
impl std::convert::From<NumberFilterConditionPB> for u32 {
fn from(value: NumberFilterConditionPB) -> Self {
value as u32
}
}
impl std::convert::TryFrom<u8> for NumberFilterConditionPB {
type Error = ErrorCode;
fn try_from(n: u8) -> Result<Self, Self::Error> {
match n {
0 => Ok(NumberFilterConditionPB::Equal),
1 => Ok(NumberFilterConditionPB::NotEqual),
2 => Ok(NumberFilterConditionPB::GreaterThan),
3 => Ok(NumberFilterConditionPB::LessThan),
4 => Ok(NumberFilterConditionPB::GreaterThanOrEqualTo),
5 => Ok(NumberFilterConditionPB::LessThanOrEqualTo),
6 => Ok(NumberFilterConditionPB::NumberIsEmpty),
7 => Ok(NumberFilterConditionPB::NumberIsNotEmpty),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for NumberFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
NumberFilterPB {
condition: NumberFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(NumberFilterConditionPB::Equal),
content: filter_rev.content.clone(),
}
}
}
impl std::convert::From<&FilterRevision> for NumberFilterPB {
fn from(rev: &FilterRevision) -> Self {
NumberFilterPB {
condition: NumberFilterConditionPB::try_from(rev.condition)
.unwrap_or(NumberFilterConditionPB::Equal),
content: rev.content.clone(),
}
}
}

View File

@ -1,73 +0,0 @@
use crate::services::field::SelectOptionIds;
use crate::services::filter::FromFilterString;
use database_model::FilterRevision;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct SelectOptionFilterPB {
#[pb(index = 1)]
pub condition: SelectOptionConditionPB,
#[pb(index = 2)]
pub option_ids: Vec<String>,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum SelectOptionConditionPB {
OptionIs = 0,
OptionIsNot = 1,
OptionIsEmpty = 2,
OptionIsNotEmpty = 3,
}
impl std::convert::From<SelectOptionConditionPB> for u32 {
fn from(value: SelectOptionConditionPB) -> Self {
value as u32
}
}
impl std::default::Default for SelectOptionConditionPB {
fn default() -> Self {
SelectOptionConditionPB::OptionIs
}
}
impl std::convert::TryFrom<u8> for SelectOptionConditionPB {
type Error = ErrorCode;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(SelectOptionConditionPB::OptionIs),
1 => Ok(SelectOptionConditionPB::OptionIsNot),
2 => Ok(SelectOptionConditionPB::OptionIsEmpty),
3 => Ok(SelectOptionConditionPB::OptionIsNotEmpty),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for SelectOptionFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
let ids = SelectOptionIds::from(filter_rev.content.clone());
SelectOptionFilterPB {
condition: SelectOptionConditionPB::try_from(filter_rev.condition)
.unwrap_or(SelectOptionConditionPB::OptionIs),
option_ids: ids.into_inner(),
}
}
}
impl std::convert::From<&FilterRevision> for SelectOptionFilterPB {
fn from(rev: &FilterRevision) -> Self {
let ids = SelectOptionIds::from(rev.content.clone());
SelectOptionFilterPB {
condition: SelectOptionConditionPB::try_from(rev.condition)
.unwrap_or(SelectOptionConditionPB::OptionIs),
option_ids: ids.into_inner(),
}
}
}

View File

@ -1,79 +0,0 @@
use crate::services::filter::FromFilterString;
use database_model::FilterRevision;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct TextFilterPB {
#[pb(index = 1)]
pub condition: TextFilterConditionPB,
#[pb(index = 2)]
pub content: String,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum TextFilterConditionPB {
Is = 0,
IsNot = 1,
Contains = 2,
DoesNotContain = 3,
StartsWith = 4,
EndsWith = 5,
TextIsEmpty = 6,
TextIsNotEmpty = 7,
}
impl std::convert::From<TextFilterConditionPB> for u32 {
fn from(value: TextFilterConditionPB) -> Self {
value as u32
}
}
impl std::default::Default for TextFilterConditionPB {
fn default() -> Self {
TextFilterConditionPB::Is
}
}
impl std::convert::TryFrom<u8> for TextFilterConditionPB {
type Error = ErrorCode;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(TextFilterConditionPB::Is),
1 => Ok(TextFilterConditionPB::IsNot),
2 => Ok(TextFilterConditionPB::Contains),
3 => Ok(TextFilterConditionPB::DoesNotContain),
4 => Ok(TextFilterConditionPB::StartsWith),
5 => Ok(TextFilterConditionPB::EndsWith),
6 => Ok(TextFilterConditionPB::TextIsEmpty),
7 => Ok(TextFilterConditionPB::TextIsNotEmpty),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for TextFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
TextFilterPB {
condition: TextFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(TextFilterConditionPB::Is),
content: filter_rev.content.clone(),
}
}
}
impl std::convert::From<&FilterRevision> for TextFilterPB {
fn from(rev: &FilterRevision) -> Self {
TextFilterPB {
condition: TextFilterConditionPB::try_from(rev.condition)
.unwrap_or(TextFilterConditionPB::Is),
content: rev.content.clone(),
}
}
}

View File

@ -1,234 +0,0 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{
CheckboxFilterPB, ChecklistFilterPB, DateFilterContentPB, DateFilterPB, FieldType,
NumberFilterPB, SelectOptionFilterPB, TextFilterPB,
};
use crate::services::field::SelectOptionIds;
use crate::services::filter::FilterType;
use bytes::Bytes;
use database_model::{FieldRevision, FieldTypeRevision, FilterRevision};
use flowy_derive::ProtoBuf;
use flowy_error::ErrorCode;
use std::convert::TryInto;
use std::sync::Arc;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct FilterPB {
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
#[pb(index = 4)]
pub data: Vec<u8>,
}
impl std::convert::From<&FilterRevision> for FilterPB {
fn from(rev: &FilterRevision) -> Self {
let field_type: FieldType = rev.field_type.into();
let bytes: Bytes = match field_type {
FieldType::RichText => TextFilterPB::from(rev).try_into().unwrap(),
FieldType::Number => NumberFilterPB::from(rev).try_into().unwrap(),
FieldType::DateTime => DateFilterPB::from(rev).try_into().unwrap(),
FieldType::SingleSelect => SelectOptionFilterPB::from(rev).try_into().unwrap(),
FieldType::MultiSelect => SelectOptionFilterPB::from(rev).try_into().unwrap(),
FieldType::Checklist => ChecklistFilterPB::from(rev).try_into().unwrap(),
FieldType::Checkbox => CheckboxFilterPB::from(rev).try_into().unwrap(),
FieldType::URL => TextFilterPB::from(rev).try_into().unwrap(),
};
Self {
id: rev.id.clone(),
field_id: rev.field_id.clone(),
field_type: rev.field_type.into(),
data: bytes.to_vec(),
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedFilterPB {
#[pb(index = 1)]
pub items: Vec<FilterPB>,
}
impl std::convert::From<Vec<Arc<FilterRevision>>> for RepeatedFilterPB {
fn from(revs: Vec<Arc<FilterRevision>>) -> Self {
RepeatedFilterPB {
items: revs.into_iter().map(|rev| rev.as_ref().into()).collect(),
}
}
}
impl std::convert::From<Vec<FilterPB>> for RepeatedFilterPB {
fn from(items: Vec<FilterPB>) -> Self {
Self { items }
}
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct DeleteFilterPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub field_type: FieldType,
#[pb(index = 3)]
pub filter_id: String,
#[pb(index = 4)]
pub view_id: String,
}
impl TryInto<DeleteFilterParams> for DeleteFilterPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<DeleteFilterParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let filter_id = NotEmptyStr::parse(self.filter_id)
.map_err(|_| ErrorCode::UnexpectedEmptyString)?
.0;
let filter_type = FilterType {
field_id,
field_type: self.field_type,
};
Ok(DeleteFilterParams {
view_id,
filter_id,
filter_type,
})
}
}
#[derive(Debug)]
pub struct DeleteFilterParams {
pub view_id: String,
pub filter_type: FilterType,
pub filter_id: String,
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct AlterFilterPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub field_type: FieldType,
/// Create a new filter if the filter_id is None
#[pb(index = 3, one_of)]
pub filter_id: Option<String>,
#[pb(index = 4)]
pub data: Vec<u8>,
#[pb(index = 5)]
pub view_id: String,
}
impl AlterFilterPayloadPB {
#[allow(dead_code)]
pub fn new<T: TryInto<Bytes, Error = ::protobuf::ProtobufError>>(
view_id: &str,
field_rev: &FieldRevision,
data: T,
) -> Self {
let data = data.try_into().unwrap_or_else(|_| Bytes::new());
Self {
view_id: view_id.to_owned(),
field_id: field_rev.id.clone(),
field_type: field_rev.ty.into(),
filter_id: None,
data: data.to_vec(),
}
}
}
impl TryInto<AlterFilterParams> for AlterFilterPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<AlterFilterParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let filter_id = match self.filter_id {
None => None,
Some(filter_id) => Some(
NotEmptyStr::parse(filter_id)
.map_err(|_| ErrorCode::FilterIdIsEmpty)?
.0,
),
};
let condition;
let mut content = "".to_string();
let bytes: &[u8] = self.data.as_ref();
match self.field_type {
FieldType::RichText | FieldType::URL => {
let filter = TextFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = filter.content;
},
FieldType::Checkbox => {
let filter = CheckboxFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
},
FieldType::Number => {
let filter = NumberFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = filter.content;
},
FieldType::DateTime => {
let filter = DateFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = DateFilterContentPB {
start: filter.start,
end: filter.end,
timestamp: filter.timestamp,
}
.to_string();
},
FieldType::SingleSelect | FieldType::MultiSelect | FieldType::Checklist => {
let filter = SelectOptionFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = SelectOptionIds::from(filter.option_ids).to_string();
},
}
Ok(AlterFilterParams {
view_id,
field_id,
filter_id,
field_type: self.field_type.into(),
condition,
content,
})
}
}
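// A hedged sketch (added for illustration, not part of the deleted file):
// building a text filter payload and letting the try_into above extract the
// (condition, content) pair. The ids are hypothetical, and it assumes the
// ProtoBuf derive's TryInto<Bytes> impl (the same bound AlterFilterPayloadPB::new
// relies on) plus TextFilterConditionPB being in scope.
fn alter_text_filter_example() -> Result<AlterFilterParams, ErrorCode> {
  let filter = TextFilterPB {
    condition: TextFilterConditionPB::Contains,
    content: "urgent".to_string(),
  };
  let data: Bytes = filter.try_into().map_err(|_| ErrorCode::ProtobufSerde)?;
  let payload = AlterFilterPayloadPB {
    view_id: "view_1".to_string(),
    field_id: "field_1".to_string(),
    field_type: FieldType::RichText,
    filter_id: None, // None asks the backend to create a new filter
    data: data.to_vec(),
  };
  payload.try_into()
}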
#[derive(Debug)]
pub struct AlterFilterParams {
pub view_id: String,
pub field_id: String,
/// Create a new filter if the filter_id is None
pub filter_id: Option<String>,
pub field_type: FieldTypeRevision,
pub condition: u8,
pub content: String,
}

View File

@ -1,85 +0,0 @@
use database_model::{GroupRevision, SelectOptionGroupConfigurationRevision};
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct UrlGroupConfigurationPB {
#[pb(index = 1)]
hide_empty: bool,
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct TextGroupConfigurationPB {
#[pb(index = 1)]
hide_empty: bool,
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct SelectOptionGroupConfigurationPB {
#[pb(index = 1)]
hide_empty: bool,
}
impl std::convert::From<SelectOptionGroupConfigurationRevision>
for SelectOptionGroupConfigurationPB
{
fn from(rev: SelectOptionGroupConfigurationRevision) -> Self {
Self {
hide_empty: rev.hide_empty,
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct GroupRecordPB {
#[pb(index = 1)]
group_id: String,
#[pb(index = 2)]
visible: bool,
}
impl std::convert::From<GroupRevision> for GroupRecordPB {
fn from(rev: GroupRevision) -> Self {
Self {
group_id: rev.id,
visible: rev.visible,
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct NumberGroupConfigurationPB {
#[pb(index = 1)]
hide_empty: bool,
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct DateGroupConfigurationPB {
#[pb(index = 1)]
pub condition: DateCondition,
#[pb(index = 2)]
hide_empty: bool,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum DateCondition {
Relative = 0,
Day = 1,
Week = 2,
Month = 3,
Year = 4,
}
impl std::default::Default for DateCondition {
fn default() -> Self {
DateCondition::Relative
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct CheckboxGroupConfigurationPB {
#[pb(index = 1)]
pub(crate) hide_empty: bool,
}

View File

@ -1,182 +0,0 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{FieldType, RowPB};
use crate::services::group::Group;
use database_model::{FieldTypeRevision, GroupConfigurationRevision};
use flowy_derive::ProtoBuf;
use flowy_error::ErrorCode;
use std::convert::TryInto;
use std::sync::Arc;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct GroupConfigurationPB {
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub field_id: String,
}
impl std::convert::From<&GroupConfigurationRevision> for GroupConfigurationPB {
fn from(rev: &GroupConfigurationRevision) -> Self {
GroupConfigurationPB {
id: rev.id.clone(),
field_id: rev.field_id.clone(),
}
}
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedGroupPB {
#[pb(index = 1)]
pub items: Vec<GroupPB>,
}
impl std::ops::Deref for RepeatedGroupPB {
type Target = Vec<GroupPB>;
fn deref(&self) -> &Self::Target {
&self.items
}
}
impl std::ops::DerefMut for RepeatedGroupPB {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.items
}
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct GroupPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub group_id: String,
#[pb(index = 3)]
pub desc: String,
#[pb(index = 4)]
pub rows: Vec<RowPB>,
#[pb(index = 5)]
pub is_default: bool,
#[pb(index = 6)]
pub is_visible: bool,
}
impl std::convert::From<Group> for GroupPB {
fn from(group: Group) -> Self {
Self {
field_id: group.field_id,
group_id: group.id,
desc: group.name,
rows: group.rows,
is_default: group.is_default,
is_visible: group.is_visible,
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedGroupConfigurationPB {
#[pb(index = 1)]
pub items: Vec<GroupConfigurationPB>,
}
impl std::convert::From<Vec<GroupConfigurationPB>> for RepeatedGroupConfigurationPB {
fn from(items: Vec<GroupConfigurationPB>) -> Self {
Self { items }
}
}
impl std::convert::From<Vec<Arc<GroupConfigurationRevision>>> for RepeatedGroupConfigurationPB {
fn from(revs: Vec<Arc<GroupConfigurationRevision>>) -> Self {
RepeatedGroupConfigurationPB {
items: revs.iter().map(|rev| rev.as_ref().into()).collect(),
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct InsertGroupPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub field_type: FieldType,
#[pb(index = 3)]
pub view_id: String,
}
impl TryInto<InsertGroupParams> for InsertGroupPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<InsertGroupParams, Self::Error> {
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::ViewIdIsInvalid)?
.0;
Ok(InsertGroupParams {
field_id,
field_type_rev: self.field_type.into(),
view_id,
})
}
}
pub struct InsertGroupParams {
pub view_id: String,
pub field_id: String,
pub field_type_rev: FieldTypeRevision,
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct DeleteGroupPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub group_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
#[pb(index = 4)]
pub view_id: String,
}
impl TryInto<DeleteGroupParams> for DeleteGroupPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<DeleteGroupParams, Self::Error> {
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let group_id = NotEmptyStr::parse(self.group_id)
.map_err(|_| ErrorCode::GroupIdIsEmpty)?
.0;
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::ViewIdIsInvalid)?
.0;
Ok(DeleteGroupParams {
field_id,
field_type_rev: self.field_type.into(),
group_id,
view_id,
})
}
}
pub struct DeleteGroupParams {
pub view_id: String,
pub field_id: String,
pub group_id: String,
pub field_type_rev: FieldTypeRevision,
}

View File

@ -1,163 +0,0 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{GroupPB, InsertedRowPB, RowPB};
use flowy_derive::ProtoBuf;
use flowy_error::ErrorCode;
use std::fmt::Formatter;
#[derive(Debug, Default, ProtoBuf)]
pub struct GroupRowsNotificationPB {
#[pb(index = 1)]
pub group_id: String,
#[pb(index = 2, one_of)]
pub group_name: Option<String>,
#[pb(index = 3)]
pub inserted_rows: Vec<InsertedRowPB>,
#[pb(index = 4)]
pub deleted_rows: Vec<String>,
#[pb(index = 5)]
pub updated_rows: Vec<RowPB>,
}
impl std::fmt::Display for GroupRowsNotificationPB {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
for inserted_row in &self.inserted_rows {
f.write_fmt(format_args!(
"Insert: {} row at {:?}",
inserted_row.row.id, inserted_row.index
))?;
}
for deleted_row in &self.deleted_rows {
f.write_fmt(format_args!("Delete: {} row", deleted_row))?;
}
Ok(())
}
}
impl GroupRowsNotificationPB {
pub fn is_empty(&self) -> bool {
self.group_name.is_none()
&& self.inserted_rows.is_empty()
&& self.deleted_rows.is_empty()
&& self.updated_rows.is_empty()
}
pub fn new(group_id: String) -> Self {
Self {
group_id,
..Default::default()
}
}
pub fn name(group_id: String, name: &str) -> Self {
Self {
group_id,
group_name: Some(name.to_owned()),
..Default::default()
}
}
pub fn insert(group_id: String, inserted_rows: Vec<InsertedRowPB>) -> Self {
Self {
group_id,
inserted_rows,
..Default::default()
}
}
pub fn delete(group_id: String, deleted_rows: Vec<String>) -> Self {
Self {
group_id,
deleted_rows,
..Default::default()
}
}
pub fn update(group_id: String, updated_rows: Vec<RowPB>) -> Self {
Self {
group_id,
updated_rows,
..Default::default()
}
}
}
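// Illustrative sketch (not part of the original diff): composing notifications
// with the helpers above. The ids are hypothetical.
fn group_rows_notification_example() {
  let unchanged = GroupRowsNotificationPB::new("group_1".to_string());
  assert!(unchanged.is_empty());
  let changed = GroupRowsNotificationPB::delete(
    "group_1".to_string(),
    vec!["row_1".to_string(), "row_2".to_string()],
  );
  assert!(!changed.is_empty());
  // The Display impl renders one "Delete: <row_id> row" entry per deleted row.
  println!("{}", changed);
}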
#[derive(Debug, Default, ProtoBuf)]
pub struct MoveGroupPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub from_group_id: String,
#[pb(index = 3)]
pub to_group_id: String,
}
#[derive(Debug)]
pub struct MoveGroupParams {
pub view_id: String,
pub from_group_id: String,
pub to_group_id: String,
}
impl TryInto<MoveGroupParams> for MoveGroupPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<MoveGroupParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let from_group_id = NotEmptyStr::parse(self.from_group_id)
.map_err(|_| ErrorCode::GroupIdIsEmpty)?
.0;
let to_group_id = NotEmptyStr::parse(self.to_group_id)
.map_err(|_| ErrorCode::GroupIdIsEmpty)?
.0;
Ok(MoveGroupParams {
view_id,
from_group_id,
to_group_id,
})
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct GroupChangesetPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub inserted_groups: Vec<InsertedGroupPB>,
#[pb(index = 3)]
pub initial_groups: Vec<GroupPB>,
#[pb(index = 4)]
pub deleted_groups: Vec<String>,
#[pb(index = 5)]
pub update_groups: Vec<GroupPB>,
}
impl GroupChangesetPB {
pub fn is_empty(&self) -> bool {
self.initial_groups.is_empty()
&& self.inserted_groups.is_empty()
&& self.deleted_groups.is_empty()
&& self.update_groups.is_empty()
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct InsertedGroupPB {
#[pb(index = 1)]
pub group: GroupPB,
#[pb(index = 2)]
pub index: i32,
}

View File

@ -1,7 +0,0 @@
mod configuration;
mod group;
mod group_changeset;
pub use configuration::*;
pub use group::*;
pub use group_changeset::*;

View File

@ -1,23 +0,0 @@
mod calendar_entities;
mod cell_entities;
mod database_entities;
mod field_entities;
pub mod filter_entities;
mod group_entities;
pub mod parser;
mod row_entities;
pub mod setting_entities;
mod sort_entities;
mod view_entities;
pub use calendar_entities::*;
pub use cell_entities::*;
pub use database_entities::*;
pub use field_entities::*;
pub use filter_entities::*;
pub use group_entities::*;
pub use row_entities::*;
pub use setting_entities::*;
pub use sort_entities::*;
pub use view_entities::*;

View File

@ -1,17 +0,0 @@
#[derive(Debug)]
pub struct NotEmptyStr(pub String);
impl NotEmptyStr {
pub fn parse(s: String) -> Result<Self, String> {
if s.trim().is_empty() {
return Err("Input string is empty".to_owned());
}
Ok(Self(s))
}
}
impl AsRef<str> for NotEmptyStr {
fn as_ref(&self) -> &str {
&self.0
}
}
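// A minimal sketch (added for illustration) of the parser above: whitespace-only
// input is rejected, anything else is wrapped unchanged.
fn not_empty_str_example() {
  assert!(NotEmptyStr::parse("  ".to_string()).is_err());
  let id = NotEmptyStr::parse("view_1".to_string()).unwrap();
  assert_eq!(id.as_ref(), "view_1");
}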

View File

@ -1,224 +0,0 @@
use crate::entities::parser::NotEmptyStr;
use database_model::RowRevision;
use flowy_derive::ProtoBuf;
use flowy_error::ErrorCode;
use std::collections::HashMap;
use std::sync::Arc;
/// [RowPB] describes a row, carrying the id of its parent block and the row's metadata.
#[derive(Debug, Default, Clone, ProtoBuf, Eq, PartialEq)]
pub struct RowPB {
#[pb(index = 1)]
pub block_id: String,
#[pb(index = 2)]
pub id: String,
#[pb(index = 3)]
pub height: i32,
}
impl RowPB {
pub fn row_id(&self) -> &str {
&self.id
}
pub fn block_id(&self) -> &str {
&self.block_id
}
}
impl std::convert::From<&RowRevision> for RowPB {
fn from(rev: &RowRevision) -> Self {
Self {
block_id: rev.block_id.clone(),
id: rev.id.clone(),
height: rev.height,
}
}
}
impl std::convert::From<&mut RowRevision> for RowPB {
fn from(rev: &mut RowRevision) -> Self {
Self {
block_id: rev.block_id.clone(),
id: rev.id.clone(),
height: rev.height,
}
}
}
impl std::convert::From<&Arc<RowRevision>> for RowPB {
fn from(rev: &Arc<RowRevision>) -> Self {
Self {
block_id: rev.block_id.clone(),
id: rev.id.clone(),
height: rev.height,
}
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct OptionalRowPB {
#[pb(index = 1, one_of)]
pub row: Option<RowPB>,
}
#[derive(Debug, Default, ProtoBuf)]
pub struct RepeatedRowPB {
#[pb(index = 1)]
pub items: Vec<RowPB>,
}
impl std::convert::From<Vec<RowPB>> for RepeatedRowPB {
fn from(items: Vec<RowPB>) -> Self {
Self { items }
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct InsertedRowPB {
#[pb(index = 1)]
pub row: RowPB,
#[pb(index = 2, one_of)]
pub index: Option<i32>,
#[pb(index = 3)]
pub is_new: bool,
}
impl InsertedRowPB {
pub fn new(row: RowPB) -> Self {
Self {
row,
index: None,
is_new: false,
}
}
pub fn with_index(row: RowPB, index: i32) -> Self {
Self {
row,
index: Some(index),
is_new: false,
}
}
}
impl std::convert::From<RowPB> for InsertedRowPB {
fn from(row: RowPB) -> Self {
Self {
row,
index: None,
is_new: false,
}
}
}
impl std::convert::From<&RowRevision> for InsertedRowPB {
fn from(row: &RowRevision) -> Self {
let row_order = RowPB::from(row);
Self::from(row_order)
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct UpdatedRowPB {
#[pb(index = 1)]
pub row: RowPB,
// The ids of the fields whose cells were updated in this row.
#[pb(index = 2)]
pub field_ids: Vec<String>,
}
#[derive(Debug, Default, Clone, ProtoBuf)]
pub struct RowIdPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub row_id: String,
}
pub struct RowIdParams {
pub view_id: String,
pub row_id: String,
}
impl TryInto<RowIdParams> for RowIdPB {
type Error = ErrorCode;
fn try_into(self) -> Result<RowIdParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let row_id = NotEmptyStr::parse(self.row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
Ok(RowIdParams {
view_id: view_id.0,
row_id: row_id.0,
})
}
}
#[derive(Debug, Default, Clone, ProtoBuf)]
pub struct BlockRowIdPB {
#[pb(index = 1)]
pub block_id: String,
#[pb(index = 2)]
pub row_id: String,
}
#[derive(ProtoBuf, Default)]
pub struct CreateRowPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2, one_of)]
pub start_row_id: Option<String>,
#[pb(index = 3, one_of)]
pub group_id: Option<String>,
#[pb(index = 4, one_of)]
pub data: Option<RowDataPB>,
}
#[derive(ProtoBuf, Default)]
pub struct RowDataPB {
#[pb(index = 1)]
pub cell_data_by_field_id: HashMap<String, String>,
}
#[derive(Default)]
pub struct CreateRowParams {
pub view_id: String,
pub start_row_id: Option<String>,
pub group_id: Option<String>,
pub cell_data_by_field_id: Option<HashMap<String, String>>,
}
impl TryInto<CreateRowParams> for CreateRowPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CreateRowParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::ViewIdIsInvalid)?;
let start_row_id = match self.start_row_id {
None => None,
Some(start_row_id) => Some(
NotEmptyStr::parse(start_row_id)
.map_err(|_| ErrorCode::RowIdIsEmpty)?
.0,
),
};
Ok(CreateRowParams {
view_id: view_id.0,
start_row_id,
group_id: self.group_id,
cell_data_by_field_id: self.data.map(|data| data.cell_data_by_field_id),
})
}
}
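// Illustrative only (not in the original file): creating a row inside a group
// with one pre-filled cell. The field/group ids are hypothetical.
fn create_row_example() -> Result<CreateRowParams, ErrorCode> {
  let mut cells = HashMap::new();
  cells.insert("field_1".to_string(), "Done".to_string());
  let payload = CreateRowPayloadPB {
    view_id: "view_1".to_string(),
    start_row_id: None, // None appends the new row at the end
    group_id: Some("group_1".to_string()),
    data: Some(RowDataPB {
      cell_data_by_field_id: cells,
    }),
  };
  payload.try_into()
}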

View File

@ -1,225 +0,0 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{
AlterFilterParams, AlterFilterPayloadPB, AlterSortParams, AlterSortPayloadPB,
CalendarLayoutSettingsPB, DeleteFilterParams, DeleteFilterPayloadPB, DeleteGroupParams,
DeleteGroupPayloadPB, DeleteSortParams, DeleteSortPayloadPB, InsertGroupParams,
InsertGroupPayloadPB, RepeatedFilterPB, RepeatedGroupConfigurationPB, RepeatedSortPB,
};
use database_model::{CalendarLayoutSetting, LayoutRevision};
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
use std::convert::TryInto;
use strum_macros::EnumIter;
/// [DatabaseViewSettingPB] defines the setting options for the grid, such as its filters, groups, and sorts.
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct DatabaseViewSettingPB {
#[pb(index = 1)]
pub current_layout: DatabaseLayoutPB,
#[pb(index = 2)]
pub layout_setting: LayoutSettingPB,
#[pb(index = 3)]
pub filters: RepeatedFilterPB,
#[pb(index = 4)]
pub group_configurations: RepeatedGroupConfigurationPB,
#[pb(index = 5)]
pub sorts: RepeatedSortPB,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum, EnumIter)]
#[repr(u8)]
pub enum DatabaseLayoutPB {
Grid = 0,
Board = 1,
Calendar = 2,
}
impl std::default::Default for DatabaseLayoutPB {
fn default() -> Self {
DatabaseLayoutPB::Grid
}
}
impl std::convert::From<LayoutRevision> for DatabaseLayoutPB {
fn from(rev: LayoutRevision) -> Self {
match rev {
LayoutRevision::Grid => DatabaseLayoutPB::Grid,
LayoutRevision::Board => DatabaseLayoutPB::Board,
LayoutRevision::Calendar => DatabaseLayoutPB::Calendar,
}
}
}
impl std::convert::From<DatabaseLayoutPB> for LayoutRevision {
fn from(layout: DatabaseLayoutPB) -> Self {
match layout {
DatabaseLayoutPB::Grid => LayoutRevision::Grid,
DatabaseLayoutPB::Board => LayoutRevision::Board,
DatabaseLayoutPB::Calendar => LayoutRevision::Calendar,
}
}
}
#[derive(Default, ProtoBuf)]
pub struct DatabaseSettingChangesetPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub layout_type: DatabaseLayoutPB,
#[pb(index = 3, one_of)]
pub alter_filter: Option<AlterFilterPayloadPB>,
#[pb(index = 4, one_of)]
pub delete_filter: Option<DeleteFilterPayloadPB>,
#[pb(index = 5, one_of)]
pub insert_group: Option<InsertGroupPayloadPB>,
#[pb(index = 6, one_of)]
pub delete_group: Option<DeleteGroupPayloadPB>,
#[pb(index = 7, one_of)]
pub alter_sort: Option<AlterSortPayloadPB>,
#[pb(index = 8, one_of)]
pub delete_sort: Option<DeleteSortPayloadPB>,
}
impl TryInto<DatabaseSettingChangesetParams> for DatabaseSettingChangesetPB {
type Error = ErrorCode;
fn try_into(self) -> Result<DatabaseSettingChangesetParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::ViewIdIsInvalid)?
.0;
let insert_filter = match self.alter_filter {
None => None,
Some(payload) => Some(payload.try_into()?),
};
let delete_filter = match self.delete_filter {
None => None,
Some(payload) => Some(payload.try_into()?),
};
let insert_group = match self.insert_group {
Some(payload) => Some(payload.try_into()?),
None => None,
};
let delete_group = match self.delete_group {
Some(payload) => Some(payload.try_into()?),
None => None,
};
let alter_sort = match self.alter_sort {
None => None,
Some(payload) => Some(payload.try_into()?),
};
let delete_sort = match self.delete_sort {
None => None,
Some(payload) => Some(payload.try_into()?),
};
Ok(DatabaseSettingChangesetParams {
view_id,
layout_type: self.layout_type.into(),
insert_filter,
delete_filter,
insert_group,
delete_group,
alter_sort,
delete_sort,
})
}
}
pub struct DatabaseSettingChangesetParams {
pub view_id: String,
pub layout_type: LayoutRevision,
pub insert_filter: Option<AlterFilterParams>,
pub delete_filter: Option<DeleteFilterParams>,
pub insert_group: Option<InsertGroupParams>,
pub delete_group: Option<DeleteGroupParams>,
pub alter_sort: Option<AlterSortParams>,
pub delete_sort: Option<DeleteSortParams>,
}
impl DatabaseSettingChangesetParams {
pub fn is_filter_changed(&self) -> bool {
self.insert_filter.is_some() || self.delete_filter.is_some()
}
}
#[derive(Debug, Eq, PartialEq, Default, ProtoBuf, Clone)]
pub struct UpdateLayoutSettingPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub layout_setting: LayoutSettingPB,
}
#[derive(Debug)]
pub struct UpdateLayoutSettingParams {
pub view_id: String,
pub layout_setting: LayoutSettingParams,
}
impl TryInto<UpdateLayoutSettingParams> for UpdateLayoutSettingPB {
type Error = ErrorCode;
fn try_into(self) -> Result<UpdateLayoutSettingParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::ViewIdIsInvalid)?
.0;
let layout_setting: LayoutSettingParams = self.layout_setting.into();
Ok(UpdateLayoutSettingParams {
view_id,
layout_setting,
})
}
}
#[derive(Debug, Eq, PartialEq, Default, ProtoBuf, Clone)]
pub struct LayoutSettingPB {
#[pb(index = 1, one_of)]
pub calendar: Option<CalendarLayoutSettingsPB>,
}
impl LayoutSettingPB {
pub fn new() -> Self {
Self::default()
}
}
impl std::convert::From<LayoutSettingParams> for LayoutSettingPB {
fn from(params: LayoutSettingParams) -> Self {
Self {
calendar: params.calendar.map(|calendar| calendar.into()),
}
}
}
impl std::convert::From<LayoutSettingPB> for LayoutSettingParams {
fn from(params: LayoutSettingPB) -> Self {
Self {
calendar: params.calendar.map(|calendar| calendar.into()),
}
}
}
#[derive(Debug, Default, Clone)]
pub struct LayoutSettingParams {
pub calendar: Option<CalendarLayoutSetting>,
}

View File

@ -1,239 +0,0 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::FieldType;
use crate::services::sort::SortType;
use std::sync::Arc;
use database_model::{FieldTypeRevision, SortCondition, SortRevision};
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct SortPB {
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
#[pb(index = 4)]
pub condition: SortConditionPB,
}
impl std::convert::From<&SortRevision> for SortPB {
fn from(sort_rev: &SortRevision) -> Self {
Self {
id: sort_rev.id.clone(),
field_id: sort_rev.field_id.clone(),
field_type: sort_rev.field_type.into(),
condition: sort_rev.condition.clone().into(),
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedSortPB {
#[pb(index = 1)]
pub items: Vec<SortPB>,
}
impl std::convert::From<Vec<Arc<SortRevision>>> for RepeatedSortPB {
fn from(revs: Vec<Arc<SortRevision>>) -> Self {
RepeatedSortPB {
items: revs.into_iter().map(|rev| rev.as_ref().into()).collect(),
}
}
}
impl std::convert::From<Vec<SortPB>> for RepeatedSortPB {
fn from(items: Vec<SortPB>) -> Self {
Self { items }
}
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum SortConditionPB {
Ascending = 0,
Descending = 1,
}
impl std::default::Default for SortConditionPB {
fn default() -> Self {
Self::Ascending
}
}
impl std::convert::From<SortCondition> for SortConditionPB {
fn from(condition: SortCondition) -> Self {
match condition {
SortCondition::Ascending => SortConditionPB::Ascending,
SortCondition::Descending => SortConditionPB::Descending,
}
}
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct AlterSortPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
/// Create a new sort if the sort_id is None
#[pb(index = 4, one_of)]
pub sort_id: Option<String>,
#[pb(index = 5)]
pub condition: SortConditionPB,
}
impl TryInto<AlterSortParams> for AlterSortPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<AlterSortParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let sort_id = match self.sort_id {
None => None,
Some(sort_id) => Some(
NotEmptyStr::parse(sort_id)
.map_err(|_| ErrorCode::SortIdIsEmpty)?
.0,
),
};
Ok(AlterSortParams {
view_id,
field_id,
sort_id,
field_type: self.field_type.into(),
condition: self.condition as u8,
})
}
}
#[derive(Debug)]
pub struct AlterSortParams {
pub view_id: String,
pub field_id: String,
/// Create a new sort if the sort_id is None
pub sort_id: Option<String>,
pub field_type: FieldTypeRevision,
pub condition: u8,
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct DeleteSortPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
#[pb(index = 4)]
pub sort_id: String,
}
impl TryInto<DeleteSortParams> for DeleteSortPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<DeleteSortParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let sort_id = NotEmptyStr::parse(self.sort_id)
.map_err(|_| ErrorCode::UnexpectedEmptyString)?
.0;
let sort_type = SortType {
field_id,
field_type: self.field_type,
};
Ok(DeleteSortParams {
view_id,
sort_type,
sort_id,
})
}
}
#[derive(Debug, Clone)]
pub struct DeleteSortParams {
pub view_id: String,
pub sort_type: SortType,
pub sort_id: String,
}
#[derive(Debug, Default, ProtoBuf)]
pub struct SortChangesetNotificationPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub insert_sorts: Vec<SortPB>,
#[pb(index = 3)]
pub delete_sorts: Vec<SortPB>,
#[pb(index = 4)]
pub update_sorts: Vec<SortPB>,
}
impl SortChangesetNotificationPB {
pub fn new(view_id: String) -> Self {
Self {
view_id,
insert_sorts: vec![],
delete_sorts: vec![],
update_sorts: vec![],
}
}
pub fn extend(&mut self, other: SortChangesetNotificationPB) {
self.insert_sorts.extend(other.insert_sorts);
self.delete_sorts.extend(other.delete_sorts);
self.update_sorts.extend(other.update_sorts);
}
pub fn is_empty(&self) -> bool {
self.insert_sorts.is_empty() && self.delete_sorts.is_empty() && self.update_sorts.is_empty()
}
}
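// A short sketch of how incremental sort changes are merged before a single
// notification is sent; the view id and SortPB values are hypothetical.
fn merge_sort_changesets_sketch(insert: SortPB, delete: SortPB) {
let mut changeset = SortChangesetNotificationPB::new("view_1".to_string());
assert!(changeset.is_empty());
let mut other = SortChangesetNotificationPB::new("view_1".to_string());
other.insert_sorts.push(insert);
other.delete_sorts.push(delete);
changeset.extend(other);
assert!(!changeset.is_empty());
}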
#[derive(Debug, Default, ProtoBuf)]
pub struct ReorderAllRowsPB {
#[pb(index = 1)]
pub row_orders: Vec<String>,
}
#[derive(Debug, Default, ProtoBuf)]
pub struct ReorderSingleRowPB {
#[pb(index = 1)]
pub row_id: String,
#[pb(index = 2)]
pub old_index: i32,
#[pb(index = 3)]
pub new_index: i32,
}

View File

@ -1,68 +0,0 @@
use crate::entities::{InsertedRowPB, UpdatedRowPB};
use flowy_derive::ProtoBuf;
#[derive(Debug, Default, Clone, ProtoBuf)]
pub struct RowsVisibilityChangesetPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 5)]
pub visible_rows: Vec<InsertedRowPB>,
#[pb(index = 6)]
pub invisible_rows: Vec<String>,
}
#[derive(Debug, Default, Clone, ProtoBuf)]
pub struct RowsChangesetPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub inserted_rows: Vec<InsertedRowPB>,
#[pb(index = 3)]
pub deleted_rows: Vec<String>,
#[pb(index = 4)]
pub updated_rows: Vec<UpdatedRowPB>,
}
impl RowsChangesetPB {
pub fn from_insert(view_id: String, inserted_rows: Vec<InsertedRowPB>) -> Self {
Self {
view_id,
inserted_rows,
..Default::default()
}
}
pub fn from_delete(view_id: String, deleted_rows: Vec<String>) -> Self {
Self {
view_id,
deleted_rows,
..Default::default()
}
}
pub fn from_update(view_id: String, updated_rows: Vec<UpdatedRowPB>) -> Self {
Self {
view_id,
updated_rows,
..Default::default()
}
}
pub fn from_move(
view_id: String,
deleted_rows: Vec<String>,
inserted_rows: Vec<InsertedRowPB>,
) -> Self {
Self {
view_id,
inserted_rows,
deleted_rows,
..Default::default()
}
}
}
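// A sketch of the constructors above in use; the ids are hypothetical. A move
// is modeled as deleting the row at its old position and inserting it at the
// new one.
fn rows_changeset_sketch(inserted: Vec<InsertedRowPB>) {
let deleted = vec!["row_1".to_string()];
let changeset = RowsChangesetPB::from_move("view_1".to_string(), deleted, inserted);
assert_eq!(changeset.deleted_rows.len(), 1);
assert!(changeset.updated_rows.is_empty());
}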

View File

@ -1,646 +0,0 @@
use crate::entities::*;
use crate::manager::DatabaseManager;
use crate::services::cell::{FromCellString, ToCellChangesetString, TypeCellData};
use crate::services::field::{
default_type_option_builder_from_type, select_type_option_from_field_rev,
type_option_builder_from_json_str, DateCellChangeset, DateChangesetPB, SelectOptionCellChangeset,
SelectOptionCellChangesetPB, SelectOptionCellChangesetParams, SelectOptionCellDataPB,
SelectOptionChangeset, SelectOptionChangesetPB, SelectOptionIds, SelectOptionPB,
};
use crate::services::row::make_row_from_row_rev;
use database_model::FieldRevision;
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
use std::sync::Arc;
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_database_data_handler(
data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<DatabasePB, FlowyError> {
let view_id: DatabaseViewIdPB = data.into_inner();
let editor = manager.open_database_view(view_id.as_ref()).await?;
let database = editor.get_database(view_id.as_ref()).await?;
data_result_ok(database)
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_database_setting_handler(
data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<DatabaseViewSettingPB, FlowyError> {
let view_id: DatabaseViewIdPB = data.into_inner();
let editor = manager.open_database_view(view_id.as_ref()).await?;
let database_setting = editor.get_setting(view_id.as_ref()).await?;
data_result_ok(database_setting)
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_database_setting_handler(
data: AFPluginData<DatabaseSettingChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: DatabaseSettingChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
if let Some(insert_params) = params.insert_group {
editor.insert_group(insert_params).await?;
}
if let Some(delete_params) = params.delete_group {
editor.delete_group(delete_params).await?;
}
if let Some(alter_filter) = params.insert_filter {
editor.create_or_update_filter(alter_filter).await?;
}
if let Some(delete_filter) = params.delete_filter {
editor.delete_filter(delete_filter).await?;
}
if let Some(alter_sort) = params.alert_sort {
let _ = editor.create_or_update_sort(alter_sort).await?;
}
if let Some(delete_sort) = params.delete_sort {
editor.delete_sort(delete_sort).await?;
}
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_all_filters_handler(
data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedFilterPB, FlowyError> {
let view_id: DatabaseViewIdPB = data.into_inner();
let editor = manager.open_database_view(view_id.as_ref()).await?;
let filters = RepeatedFilterPB {
items: editor.get_all_filters(view_id.as_ref()).await?,
};
data_result_ok(filters)
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_all_sorts_handler(
data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedSortPB, FlowyError> {
let view_id: DatabaseViewIdPB = data.into_inner();
let editor = manager.open_database_view(view_id.as_ref()).await?;
let sorts = RepeatedSortPB {
items: editor.get_all_sorts(view_id.as_ref()).await?,
};
data_result_ok(sorts)
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn delete_all_sorts_handler(
data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let view_id: DatabaseViewIdPB = data.into_inner();
let editor = manager.open_database_view(view_id.as_ref()).await?;
editor.delete_all_sorts(view_id.as_ref()).await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_fields_handler(
data: AFPluginData<GetFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedFieldPB, FlowyError> {
let params: GetFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
let field_revs = editor.get_field_revs(params.field_ids).await?;
let repeated_field: RepeatedFieldPB = field_revs
.into_iter()
.map(FieldPB::from)
.collect::<Vec<_>>()
.into();
data_result_ok(repeated_field)
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_field_handler(
data: AFPluginData<FieldChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let changeset: FieldChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&changeset.view_id).await?;
editor.update_field(changeset).await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_field_type_option_handler(
data: AFPluginData<TypeOptionChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: TypeOptionChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
let old_field_rev = editor.get_field_rev(&params.field_id).await;
editor
.update_field_type_option(
&params.view_id,
&params.field_id,
params.type_option_data,
old_field_rev,
)
.await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn delete_field_handler(
data: AFPluginData<DeleteFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: FieldIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
editor.delete_field(&params.field_id).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn switch_to_field_handler(
data: AFPluginData<UpdateFieldTypePayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: EditFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
let old_field_rev = editor.get_field_rev(&params.field_id).await;
editor
.switch_to_field_type(&params.field_id, &params.field_type)
.await?;
// Get the field_rev by field_id; if it doesn't exist, create the default FieldRevision from the FieldType.
let new_field_rev = editor
.get_field_rev(&params.field_id)
.await
.unwrap_or(Arc::new(editor.next_field_rev(&params.field_type).await?));
// Update the type-option data after the field type has been changed
let type_option_data = get_type_option_data(&new_field_rev, &params.field_type).await?;
editor
.update_field_type_option(
&params.view_id,
&new_field_rev.id,
type_option_data,
old_field_rev,
)
.await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn duplicate_field_handler(
data: AFPluginData<DuplicateFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: FieldIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
editor.duplicate_field(&params.field_id).await?;
Ok(())
}
/// Return the FieldTypeOptionData if the Field exists; otherwise return a record-not-found error.
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_field_type_option_data_handler(
data: AFPluginData<TypeOptionPathPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<TypeOptionPB, FlowyError> {
let params: TypeOptionPathParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
match editor.get_field_rev(&params.field_id).await {
None => Err(FlowyError::record_not_found()),
Some(field_rev) => {
let field_type = field_rev.ty.into();
let type_option_data = get_type_option_data(&field_rev, &field_type).await?;
let data = TypeOptionPB {
view_id: params.view_id,
field: field_rev.into(),
type_option_data,
};
data_result_ok(data)
},
}
}
/// Create a FieldMeta and save it. Return the FieldTypeOptionData.
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn create_field_type_option_data_handler(
data: AFPluginData<CreateFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<TypeOptionPB, FlowyError> {
let params: CreateFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
let field_rev = editor
.create_new_field_rev_with_type_option(&params.field_type, params.type_option_data)
.await?;
let field_type: FieldType = field_rev.ty.into();
let type_option_data = get_type_option_data(&field_rev, &field_type).await?;
data_result_ok(TypeOptionPB {
view_id: params.view_id,
field: field_rev.into(),
type_option_data,
})
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn move_field_handler(
data: AFPluginData<MoveFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: MoveFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
editor.move_field(params).await?;
Ok(())
}
/// The [FieldRevision] contains multiple type-option entries, each of which belongs to a specific FieldType.
async fn get_type_option_data(
field_rev: &FieldRevision,
field_type: &FieldType,
) -> FlowyResult<Vec<u8>> {
let s = field_rev
.get_type_option_str(field_type)
.map(|value| value.to_owned())
.unwrap_or_else(|| {
default_type_option_builder_from_type(field_type)
.serializer()
.json_str()
});
let field_type: FieldType = field_rev.ty.into();
let builder = type_option_builder_from_json_str(&s, &field_type);
let type_option_data = builder.serializer().protobuf_bytes().to_vec();
Ok(type_option_data)
}
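// A hedged usage sketch: fetch the serialized type-option bytes for whatever
// field type the revision currently has. The field_rev is assumed to come
// from one of the editors above.
async fn type_option_bytes_sketch(field_rev: &FieldRevision) -> FlowyResult<Vec<u8>> {
// Falls back to the default type-option JSON when none is stored yet.
let field_type: FieldType = field_rev.ty.into();
get_type_option_data(field_rev, &field_type).await
}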
// #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn get_row_handler(
data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<OptionalRowPB, FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
let row = editor
.get_row_rev(&params.row_id)
.await?
.map(make_row_from_row_rev);
data_result_ok(OptionalRowPB { row })
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn delete_row_handler(
data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
editor.delete_row(&params.row_id).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn duplicate_row_handler(
data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
editor
.duplicate_row(&params.view_id, &params.row_id)
.await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_row_handler(
data: AFPluginData<MoveRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: MoveRowParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
editor.move_row(params).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn create_row_handler(
data: AFPluginData<CreateRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RowPB, FlowyError> {
let params: CreateRowParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(params.view_id.as_ref()).await?;
let row = editor.create_row(params).await?;
data_result_ok(row)
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_cell_handler(
data: AFPluginData<CellIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<CellPB, FlowyError> {
let params: CellIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
match editor.get_cell(&params).await {
None => data_result_ok(CellPB::empty(&params.field_id, &params.row_id)),
Some(cell) => data_result_ok(cell),
}
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_cell_handler(
data: AFPluginData<CellChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let changeset: CellChangesetPB = data.into_inner();
let editor = manager.get_database_editor(&changeset.view_id).await?;
editor
.update_cell_with_changeset(
&changeset.row_id,
&changeset.field_id,
changeset.type_cell_data,
)
.await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn new_select_option_handler(
data: AFPluginData<CreateSelectOptionPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<SelectOptionPB, FlowyError> {
let params: CreateSelectOptionParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
match editor.get_field_rev(&params.field_id).await {
None => Err(ErrorCode::InvalidData.into()),
Some(field_rev) => {
let type_option = select_type_option_from_field_rev(&field_rev)?;
let select_option = type_option.create_option(&params.option_name);
data_result_ok(select_option)
},
}
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_select_option_handler(
data: AFPluginData<SelectOptionChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let changeset: SelectOptionChangeset = data.into_inner().try_into()?;
let editor = manager
.get_database_editor(&changeset.cell_path.view_id)
.await?;
let field_id = changeset.cell_path.field_id.clone();
let (tx, rx) = tokio::sync::oneshot::channel();
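// The closure passed to modify_field_rev below cannot return the cell
// changeset directly, so the oneshot channel carries it back out to this
// scope, where update_cell_with_changeset can then be awaited.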
editor
.modify_field_rev(&changeset.cell_path.view_id, &field_id, |field_rev| {
let mut type_option = select_type_option_from_field_rev(field_rev)?;
let mut cell_changeset_str = None;
let mut is_changed = None;
for option in changeset.insert_options {
cell_changeset_str = Some(
SelectOptionCellChangeset::from_insert_option_id(&option.id).to_cell_changeset_str(),
);
type_option.insert_option(option);
is_changed = Some(());
}
for option in changeset.update_options {
type_option.insert_option(option);
is_changed = Some(());
}
for option in changeset.delete_options {
cell_changeset_str = Some(
SelectOptionCellChangeset::from_delete_option_id(&option.id).to_cell_changeset_str(),
);
type_option.delete_option(option);
is_changed = Some(());
}
if is_changed.is_some() {
field_rev.insert_type_option(&*type_option);
}
let _ = tx.send(cell_changeset_str);
Ok(is_changed)
})
.await?;
if let Ok(Some(cell_changeset_str)) = rx.await {
match editor
.update_cell_with_changeset(
&changeset.cell_path.row_id,
&changeset.cell_path.field_id,
cell_changeset_str,
)
.await
{
Ok(_) => {},
Err(e) => tracing::error!("{}", e),
}
}
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_select_option_handler(
data: AFPluginData<CellIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<SelectOptionCellDataPB, FlowyError> {
let params: CellIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
match editor.get_field_rev(&params.field_id).await {
None => {
tracing::error!(
"Can't find the select option field with id: {}",
params.field_id
);
data_result_ok(SelectOptionCellDataPB::default())
},
Some(field_rev) => {
//
let cell_rev = editor
.get_cell_rev(&params.row_id, &params.field_id)
.await?;
let type_option = select_type_option_from_field_rev(&field_rev)?;
let type_cell_data: TypeCellData = match cell_rev {
None => TypeCellData {
cell_str: "".to_string(),
field_type: field_rev.ty.into(),
},
Some(cell_rev) => cell_rev.try_into()?,
};
let ids = SelectOptionIds::from_cell_str(&type_cell_data.cell_str)?;
let selected_options = type_option.get_selected_options(ids);
data_result_ok(selected_options)
},
}
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_select_option_cell_handler(
data: AFPluginData<SelectOptionCellChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?;
let editor = manager
.get_database_editor(&params.cell_identifier.view_id)
.await?;
let changeset = SelectOptionCellChangeset {
insert_option_ids: params.insert_option_ids,
delete_option_ids: params.delete_option_ids,
};
editor
.update_cell_with_changeset(
&params.cell_identifier.row_id,
&params.cell_identifier.field_id,
changeset,
)
.await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_date_cell_handler(
data: AFPluginData<DateChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let data = data.into_inner();
let cell_path: CellIdParams = data.cell_path.try_into()?;
let cell_changeset = DateCellChangeset {
date: data.date,
time: data.time,
include_time: data.include_time,
is_utc: data.is_utc,
};
let editor = manager.get_database_editor(&cell_path.view_id).await?;
editor
.update_cell(cell_path.row_id, cell_path.field_id, cell_changeset)
.await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_groups_handler(
data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedGroupPB, FlowyError> {
let params: DatabaseViewIdPB = data.into_inner();
let editor = manager.get_database_editor(&params.value).await?;
let groups = editor.load_groups(&params.value).await?;
data_result_ok(groups)
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_group_handler(
data: AFPluginData<DatabaseGroupIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<GroupPB, FlowyError> {
let params: DatabaseGroupIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
let group = editor.get_group(&params.view_id, &params.group_id).await?;
data_result_ok(group)
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_group_handler(
data: AFPluginData<MoveGroupPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> FlowyResult<()> {
let params: MoveGroupParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(params.view_id.as_ref()).await?;
editor.move_group(params).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_group_row_handler(
data: AFPluginData<MoveGroupRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> FlowyResult<()> {
let params: MoveGroupRowParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(params.view_id.as_ref()).await?;
editor.move_group_row(params).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(manager), err)]
pub(crate) async fn get_databases_handler(
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedDatabaseDescriptionPB, FlowyError> {
let items = manager
.get_databases()
.await?
.into_iter()
.map(|database_info| DatabaseDescriptionPB {
name: database_info.name,
database_id: database_info.database_id,
})
.collect::<Vec<DatabaseDescriptionPB>>();
data_result_ok(RepeatedDatabaseDescriptionPB { items })
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn set_layout_setting_handler(
data: AFPluginData<UpdateLayoutSettingPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> FlowyResult<()> {
let params: UpdateLayoutSettingParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_editor(params.view_id.as_ref()).await?;
database_editor
.set_layout_setting(&params.view_id, params.layout_setting)
.await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn get_layout_setting_handler(
data: AFPluginData<DatabaseLayoutIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<LayoutSettingPB, FlowyError> {
let params = data.into_inner();
let database_editor = manager.get_database_editor(&params.view_id).await?;
let layout_setting = database_editor
.get_layout_setting(&params.view_id, params.layout)
.await?;
data_result_ok(layout_setting.into())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn get_calendar_events_handler(
data: AFPluginData<CalendarEventRequestPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedCalendarEventPB, FlowyError> {
let params: CalendarEventRequestParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_editor(&params.view_id).await?;
let events = database_editor
.get_all_calendar_events(&params.view_id)
.await;
data_result_ok(RepeatedCalendarEventPB { items: events })
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn get_calendar_event_handler(
data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<CalendarEventPB, FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_editor(&params.view_id).await?;
let event = database_editor
.get_calendar_event(&params.view_id, &params.row_id)
.await;
match event {
None => Err(FlowyError::record_not_found()),
Some(event) => data_result_ok(event),
}
}

View File

@ -1,259 +0,0 @@
use crate::event_handler::*;
use crate::manager::DatabaseManager;
use flowy_derive::{Flowy_Event, ProtoBuf_Enum};
use lib_dispatch::prelude::*;
use std::sync::Arc;
use strum_macros::Display;
pub fn init(database_manager: Arc<DatabaseManager>) -> AFPlugin {
let mut plugin = AFPlugin::new()
.name(env!("CARGO_PKG_NAME"))
.state(database_manager);
plugin = plugin
.event(DatabaseEvent::GetDatabase, get_database_data_handler)
// .event(GridEvent::GetGridBlocks, get_grid_blocks_handler)
.event(DatabaseEvent::GetDatabaseSetting, get_database_setting_handler)
.event(DatabaseEvent::UpdateDatabaseSetting, update_database_setting_handler)
.event(DatabaseEvent::GetAllFilters, get_all_filters_handler)
.event(DatabaseEvent::GetAllSorts, get_all_sorts_handler)
.event(DatabaseEvent::DeleteAllSorts, delete_all_sorts_handler)
// Field
.event(DatabaseEvent::GetFields, get_fields_handler)
.event(DatabaseEvent::UpdateField, update_field_handler)
.event(DatabaseEvent::UpdateFieldTypeOption, update_field_type_option_handler)
.event(DatabaseEvent::DeleteField, delete_field_handler)
.event(DatabaseEvent::UpdateFieldType, switch_to_field_handler)
.event(DatabaseEvent::DuplicateField, duplicate_field_handler)
.event(DatabaseEvent::MoveField, move_field_handler)
.event(DatabaseEvent::GetTypeOption, get_field_type_option_data_handler)
.event(DatabaseEvent::CreateTypeOption, create_field_type_option_data_handler)
// Row
.event(DatabaseEvent::CreateRow, create_row_handler)
.event(DatabaseEvent::GetRow, get_row_handler)
.event(DatabaseEvent::DeleteRow, delete_row_handler)
.event(DatabaseEvent::DuplicateRow, duplicate_row_handler)
.event(DatabaseEvent::MoveRow, move_row_handler)
// Cell
.event(DatabaseEvent::GetCell, get_cell_handler)
.event(DatabaseEvent::UpdateCell, update_cell_handler)
// SelectOption
.event(DatabaseEvent::CreateSelectOption, new_select_option_handler)
.event(DatabaseEvent::UpdateSelectOption, update_select_option_handler)
.event(DatabaseEvent::GetSelectOptionCellData, get_select_option_handler)
.event(DatabaseEvent::UpdateSelectOptionCell, update_select_option_cell_handler)
// Date
.event(DatabaseEvent::UpdateDateCell, update_date_cell_handler)
// Group
.event(DatabaseEvent::MoveGroup, move_group_handler)
.event(DatabaseEvent::MoveGroupRow, move_group_row_handler)
.event(DatabaseEvent::GetGroups, get_groups_handler)
.event(DatabaseEvent::GetGroup, get_group_handler)
// Database
.event(DatabaseEvent::GetDatabases, get_databases_handler)
// Calendar
.event(DatabaseEvent::GetAllCalendarEvents, get_calendar_events_handler)
.event(DatabaseEvent::GetCalendarEvent, get_calendar_event_handler)
// Layout setting
.event(DatabaseEvent::SetLayoutSetting, set_layout_setting_handler)
.event(DatabaseEvent::GetLayoutSetting, get_layout_setting_handler);
plugin
}
/// [DatabaseEvent] defines events that are used to interact with the Grid. You can check [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/backend/protobuf)
/// out; it explains how to use these annotations: input, output, etc.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
#[event_err = "FlowyError"]
pub enum DatabaseEvent {
/// [GetDatabase] event is used to get the [DatabasePB]
///
/// The event handler accepts a [DatabaseViewIdPB] and returns a [DatabasePB] if there are no errors.
#[event(input = "DatabaseViewIdPB", output = "DatabasePB")]
GetDatabase = 0,
/// [GetDatabaseSetting] event is used to get the database's settings.
///
/// The event handler accepts a [DatabaseViewIdPB] and returns a [DatabaseViewSettingPB]
/// if there are no errors.
#[event(input = "DatabaseViewIdPB", output = "DatabaseViewSettingPB")]
GetDatabaseSetting = 2,
/// [UpdateDatabaseSetting] event is used to update the database's settings.
///
/// The event handler accepts a [DatabaseSettingChangesetPB] and returns an error if it fails to modify the grid's settings.
#[event(input = "DatabaseSettingChangesetPB")]
UpdateDatabaseSetting = 3,
#[event(input = "DatabaseViewIdPB", output = "RepeatedFilterPB")]
GetAllFilters = 4,
#[event(input = "DatabaseViewIdPB", output = "RepeatedSortPB")]
GetAllSorts = 5,
#[event(input = "DatabaseViewIdPB")]
DeleteAllSorts = 6,
/// [GetFields] event is used to get the database's settings.
///
/// The event handler accepts a [GetFieldPayloadPB] and returns a [RepeatedFieldPB]
/// if there are no errors.
#[event(input = "GetFieldPayloadPB", output = "RepeatedFieldPB")]
GetFields = 10,
/// [UpdateField] event is used to update a field's attributes.
///
/// The event handler accepts a [FieldChangesetPB] and returns an error if it fails to modify the
/// field.
#[event(input = "FieldChangesetPB")]
UpdateField = 11,
/// [UpdateFieldTypeOption] event is used to update the field's type-option data. Certain field
/// types have user-defined options such as color, date format, number format, or a list of values
/// for a multi-select list. These options are defined within a specialization of the
/// FieldTypeOption class.
///
/// Check out [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/frontend/grid#fieldtype)
/// for more information.
///
/// The event handler accepts a [TypeOptionChangesetPB] and returns an error if it fails to modify the
/// field.
#[event(input = "TypeOptionChangesetPB")]
UpdateFieldTypeOption = 12,
/// [DeleteField] event is used to delete a Field. [DeleteFieldPayloadPB] is the context that
/// is used to delete the field from the Database.
#[event(input = "DeleteFieldPayloadPB")]
DeleteField = 14,
/// [UpdateFieldType] event is used to update the current Field's type.
/// It will insert a new FieldTypeOptionData if one doesn't already exist for the new FieldType;
/// otherwise it reuses the existing FieldTypeOptionData. You can check the [DatabaseRevisionPad] for more details.
#[event(input = "UpdateFieldTypePayloadPB")]
UpdateFieldType = 20,
/// [DuplicateField] event is used to duplicate a Field. The duplicated field data is a
/// deep copy of the target field. The passed-in [DuplicateFieldPayloadPB] is the context that is
/// used to duplicate the field.
///
/// Return errors if failed to duplicate the field.
///
#[event(input = "DuplicateFieldPayloadPB")]
DuplicateField = 21,
/// [MoveField] event is used to move a field to a new position. The passed-in
/// [MoveFieldPayloadPB] is the context that is used to move the field.
#[event(input = "MoveFieldPayloadPB")]
MoveField = 22,
/// [GetTypeOption] event is used to get the FieldTypeOption data for a specific field type.
///
/// Check out the [TypeOptionPB] for more details. If the [FieldTypeOptionData] doesn't exist
/// for the target type, the [TypeOptionBuilder] will create the default data for that type.
///
/// Return the [TypeOptionPB] if there are no errors.
#[event(input = "TypeOptionPathPB", output = "TypeOptionPB")]
GetTypeOption = 23,
/// [CreateTypeOption] event is used to create a new FieldTypeOptionData.
#[event(input = "CreateFieldPayloadPB", output = "TypeOptionPB")]
CreateTypeOption = 24,
/// [CreateSelectOption] event is used to create a new select option. Returns a [SelectOptionPB] if
/// there are no errors.
#[event(input = "CreateSelectOptionPayloadPB", output = "SelectOptionPB")]
CreateSelectOption = 30,
/// [GetSelectOptionCellData] event is used to get the select option data for cell editing.
/// [CellIdPB] locate which cell data that will be read from. The return value, [SelectOptionCellDataPB]
/// contains the available options and the currently selected options.
#[event(input = "CellIdPB", output = "SelectOptionCellDataPB")]
GetSelectOptionCellData = 31,
/// [UpdateSelectOption] event is used to update a FieldTypeOptionData whose field_type is
/// FieldType::SingleSelect or FieldType::MultiSelect.
///
/// This event may trigger the DatabaseNotification::DidUpdateCell event.
/// For example, DatabaseNotification::DidUpdateCell will be triggered if the [SelectOptionChangesetPB]
/// carries a change that updates the name of the option.
#[event(input = "SelectOptionChangesetPB")]
UpdateSelectOption = 32,
#[event(input = "CreateRowPayloadPB", output = "RowPB")]
CreateRow = 50,
/// [GetRow] event is used to get the row data, [RowPB]. [OptionalRowPB] is a wrapper that makes it
/// possible to return nullable row data.
#[event(input = "RowIdPB", output = "OptionalRowPB")]
GetRow = 51,
#[event(input = "RowIdPB")]
DeleteRow = 52,
#[event(input = "RowIdPB")]
DuplicateRow = 53,
#[event(input = "MoveRowPayloadPB")]
MoveRow = 54,
#[event(input = "CellIdPB", output = "CellPB")]
GetCell = 70,
/// [UpdateCell] event is used to update the cell content. The passed in data, [CellChangesetPB],
/// carries the changes that will be applied to the cell content by calling `update_cell` function.
///
/// The 'content' property of the [CellChangesetPB] is a String type. It can be used directly if the
/// cell uses string data. For example, the TextCell or NumberCell.
///
/// But it can also be treated as a generic type, because we can use [serde] to deserialize the string
/// into a specific data type. For the moment, the 'content' will be deserialized to a concrete type
/// when the FieldType is SingleSelect, DateTime, and MultiSelect. Please see
/// the [UpdateSelectOptionCell] and [UpdateDateCell] events for more details.
#[event(input = "CellChangesetPB")]
UpdateCell = 71,
/// [UpdateSelectOptionCell] event is used to update a select option cell's data. [SelectOptionCellChangesetPB]
/// contains options that will be deleted or inserted. It can be cast to [CellChangesetPB] that
/// will be used by the `update_cell` function.
#[event(input = "SelectOptionCellChangesetPB")]
UpdateSelectOptionCell = 72,
/// [UpdateDateCell] event is used to update a date cell's data. [DateChangesetPB]
/// contains the date and the time string. It can be cast to [CellChangesetPB] that
/// will be used by the `update_cell` function.
#[event(input = "DateChangesetPB")]
UpdateDateCell = 80,
#[event(input = "DatabaseViewIdPB", output = "RepeatedGroupPB")]
GetGroups = 100,
#[event(input = "DatabaseGroupIdPB", output = "GroupPB")]
GetGroup = 101,
#[event(input = "MoveGroupPayloadPB")]
MoveGroup = 111,
#[event(input = "MoveGroupRowPayloadPB")]
MoveGroupRow = 112,
#[event(input = "MoveGroupRowPayloadPB")]
GroupByField = 113,
/// Returns all the databases
#[event(output = "RepeatedDatabaseDescriptionPB")]
GetDatabases = 114,
#[event(input = "UpdateLayoutSettingPB")]
SetLayoutSetting = 115,
#[event(input = "DatabaseLayoutIdPB", output = "LayoutSettingPB")]
GetLayoutSetting = 116,
#[event(input = "CalendarEventRequestPB", output = "RepeatedCalendarEventPB")]
GetAllCalendarEvents = 117,
#[event(input = "RowIdPB", output = "CalendarEventPB")]
GetCalendarEvent = 118,
#[event(input = "MoveCalendarEventPB")]
MoveCalendarEvent = 119,
}

View File

@ -1,14 +0,0 @@
extern crate core;
#[macro_use]
mod macros;
mod event_handler;
pub mod event_map;
pub mod manager;
pub mod entities;
mod notification;
mod protobuf;
pub mod services;
pub mod util;

View File

@ -1,92 +0,0 @@
#[macro_export]
macro_rules! impl_into_box_type_option_builder {
($target: ident) => {
impl std::convert::From<$target> for BoxTypeOptionBuilder {
fn from(target: $target) -> BoxTypeOptionBuilder {
Box::new(target)
}
}
};
}
macro_rules! impl_builder_from_json_str_and_from_bytes {
($target: ident,$type_option: ident) => {
impl $target {
pub fn from_protobuf_bytes(bytes: Bytes) -> $target {
let type_option = $type_option::from_protobuf_bytes(bytes);
$target(type_option)
}
pub fn from_json_str(s: &str) -> $target {
let type_option = $type_option::from_json_str(s);
$target(type_option)
}
}
};
}
#[macro_export]
macro_rules! impl_type_option {
($target: ident, $field_type:expr) => {
impl std::convert::From<&FieldRevision> for $target {
fn from(field_rev: &FieldRevision) -> $target {
match field_rev.get_type_option::<$target>($field_type.into()) {
None => $target::default(),
Some(target) => target,
}
}
}
impl std::convert::From<&std::sync::Arc<FieldRevision>> for $target {
fn from(field_rev: &std::sync::Arc<FieldRevision>) -> $target {
match field_rev.get_type_option::<$target>($field_type.into()) {
None => $target::default(),
Some(target) => target,
}
}
}
impl std::convert::From<$target> for String {
fn from(type_option: $target) -> String {
type_option.json_str()
}
}
impl TypeOptionDataSerializer for $target {
fn json_str(&self) -> String {
match serde_json::to_string(&self) {
Ok(s) => s,
Err(e) => {
tracing::error!("Field type data serialize to json fail, error: {:?}", e);
serde_json::to_string(&$target::default()).unwrap()
},
}
}
fn protobuf_bytes(&self) -> Bytes {
self.clone().try_into().unwrap()
}
}
impl TypeOptionDataDeserializer for $target {
fn from_json_str(s: &str) -> $target {
match serde_json::from_str(s) {
Ok(obj) => obj,
Err(err) => {
tracing::error!(
"{} type option deserialize from {} failed, {:?}",
stringify!($target),
s,
err
);
$target::default()
},
}
}
fn from_protobuf_bytes(bytes: Bytes) -> $target {
$target::try_from(bytes).unwrap_or($target::default())
}
}
};
}
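// A hypothetical sketch of the macro in use. CheckboxTypeOption is
// illustrative only; the real type options live elsewhere in this crate, and
// the target must also provide the Bytes conversions that protobuf_bytes and
// from_protobuf_bytes rely on.
//
// #[derive(Default, Clone, Serialize, Deserialize, ProtoBuf)]
// pub struct CheckboxTypeOption {
//   #[pb(index = 1)]
//   pub is_selected: bool,
// }
// impl_type_option!(CheckboxTypeOption, FieldType::Checkbox);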

View File

@ -1,438 +0,0 @@
use crate::entities::DatabaseLayoutPB;
use crate::services::database::{
make_database_block_rev_manager, DatabaseEditor, DatabaseRefIndexerQuery,
DatabaseRevisionCloudService, DatabaseRevisionMergeable, DatabaseRevisionSerde,
};
use crate::services::database_view::{
make_database_view_rev_manager, make_database_view_revision_pad, DatabaseViewEditor,
};
use crate::services::persistence::block_index::BlockRowIndexer;
use crate::services::persistence::database_ref::{DatabaseInfo, DatabaseRefs, DatabaseViewRef};
use crate::services::persistence::kv::DatabaseKVPersistence;
use crate::services::persistence::migration::DatabaseMigration;
use crate::services::persistence::rev_sqlite::{
SQLiteDatabaseRevisionPersistence, SQLiteDatabaseRevisionSnapshotPersistence,
};
use crate::services::persistence::DatabaseDBConnection;
use std::collections::HashMap;
use database_model::{
gen_database_id, BuildDatabaseContext, DatabaseRevision, DatabaseViewRevision,
};
use flowy_client_sync::client_database::{
make_database_block_operations, make_database_operations, make_database_view_operations,
};
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{
RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration, RevisionWebSocket,
};
use flowy_sqlite::ConnectionPool;
use flowy_task::TaskDispatcher;
use lib_infra::future::Fut;
use revision_model::Revision;
use std::sync::Arc;
use tokio::sync::RwLock;
pub trait DatabaseUser: Send + Sync {
fn user_id(&self) -> Result<i64, FlowyError>;
fn token(&self) -> Result<String, FlowyError>;
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError>;
}
pub struct DatabaseManager {
editors_by_database_id: RwLock<HashMap<String, Arc<DatabaseEditor>>>,
database_user: Arc<dyn DatabaseUser>,
block_indexer: Arc<BlockRowIndexer>,
database_refs: Arc<DatabaseRefs>,
#[allow(dead_code)]
kv_persistence: Arc<DatabaseKVPersistence>,
task_scheduler: Arc<RwLock<TaskDispatcher>>,
#[allow(dead_code)]
migration: DatabaseMigration,
}
impl DatabaseManager {
pub fn new(
database_user: Arc<dyn DatabaseUser>,
_rev_web_socket: Arc<dyn RevisionWebSocket>,
task_scheduler: Arc<RwLock<TaskDispatcher>>,
database_db: Arc<dyn DatabaseDBConnection>,
) -> Self {
let editors_by_database_id = RwLock::new(HashMap::new());
let kv_persistence = Arc::new(DatabaseKVPersistence::new(database_db.clone()));
let block_indexer = Arc::new(BlockRowIndexer::new(database_db.clone()));
let database_refs = Arc::new(DatabaseRefs::new(database_db));
let migration = DatabaseMigration::new(database_user.clone(), database_refs.clone());
Self {
editors_by_database_id,
database_user,
kv_persistence,
block_indexer,
database_refs,
task_scheduler,
migration,
}
}
pub async fn initialize_with_new_user(&self, _user_id: i64, _token: &str) -> FlowyResult<()> {
Ok(())
}
pub async fn initialize(
&self,
user_id: i64,
_token: &str,
get_views_fn: Fut<Vec<(String, String, DatabaseLayoutPB)>>,
) -> FlowyResult<()> {
self.migration.run(user_id, get_views_fn).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn create_database<T: AsRef<str>>(
&self,
database_id: &str,
view_id: T,
name: &str,
revisions: Vec<Revision>,
) -> FlowyResult<()> {
let db_pool = self.database_user.db_pool()?;
let _ = self
.database_refs
.bind(database_id, view_id.as_ref(), true, name);
let rev_manager = self.make_database_rev_manager(database_id, db_pool)?;
rev_manager.reset_object(revisions).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip_all, err)]
async fn create_database_view<T: AsRef<str>>(
&self,
view_id: T,
revisions: Vec<Revision>,
) -> FlowyResult<()> {
let view_id = view_id.as_ref();
let pool = self.database_user.db_pool()?;
let rev_manager = make_database_view_rev_manager(pool, view_id).await?;
rev_manager.reset_object(revisions).await?;
Ok(())
}
pub async fn create_database_block<T: AsRef<str>>(
&self,
block_id: T,
revisions: Vec<Revision>,
) -> FlowyResult<()> {
let block_id = block_id.as_ref();
let rev_manager = make_database_block_rev_manager(&self.database_user, block_id)?;
rev_manager.reset_object(revisions).await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub async fn open_database_view<T: AsRef<str>>(
&self,
view_id: T,
) -> FlowyResult<Arc<DatabaseEditor>> {
let view_id = view_id.as_ref();
let database_info = self.database_refs.get_database_with_view(view_id)?;
self
.get_or_create_database_editor(&database_info.database_id, view_id)
.await
}
#[tracing::instrument(level = "debug", skip_all)]
pub async fn close_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
let view_id = view_id.as_ref();
let database_info = self.database_refs.get_database_with_view(view_id)?;
tracing::Span::current().record("database_id", &database_info.database_id);
// Remove the editor from the map up front so the write lock on
// editors_by_database_id isn't held for too long.
let database_editor = self
.editors_by_database_id
.write()
.await
.remove(&database_info.database_id);
if let Some(database_editor) = database_editor {
database_editor.close_view_editor(view_id).await;
if database_editor.number_of_ref_views().await == 0 {
database_editor.dispose().await;
} else {
self
.editors_by_database_id
.write()
.await
.insert(database_info.database_id, database_editor);
}
}
Ok(())
}
// #[tracing::instrument(level = "debug", skip(self), err)]
pub async fn get_database_editor(&self, view_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
let database_info = self.database_refs.get_database_with_view(view_id)?;
let database_editor = self
.editors_by_database_id
.read()
.await
.get(&database_info.database_id)
.cloned();
match database_editor {
None => {
// Drop the read guard ASAP so it doesn't conflict with the read/write locks taken below
self.open_database_view(view_id).await
},
Some(editor) => Ok(editor),
}
}
pub async fn get_databases(&self) -> FlowyResult<Vec<DatabaseInfo>> {
self.database_refs.get_all_databases()
}
pub async fn get_database_ref_views(
&self,
database_id: &str,
) -> FlowyResult<Vec<DatabaseViewRef>> {
self.database_refs.get_ref_views_with_database(database_id)
}
async fn get_or_create_database_editor(
&self,
database_id: &str,
view_id: &str,
) -> FlowyResult<Arc<DatabaseEditor>> {
let user = self.database_user.clone();
let create_view_editor = |database_editor: Arc<DatabaseEditor>| async move {
let (view_pad, view_rev_manager) = make_database_view_revision_pad(view_id, user).await?;
DatabaseViewEditor::from_pad(
database_editor.database_view_data.clone(),
database_editor.cell_data_cache.clone(),
view_rev_manager,
view_pad,
)
.await
};
let database_editor = self
.editors_by_database_id
.read()
.await
.get(database_id)
.cloned();
match database_editor {
None => {
let mut editors_by_database_id = self.editors_by_database_id.write().await;
let db_pool = self.database_user.db_pool()?;
let database_editor = self.make_database_rev_editor(view_id, db_pool).await?;
editors_by_database_id.insert(database_id.to_string(), database_editor.clone());
Ok(database_editor)
},
Some(database_editor) => {
let is_open = database_editor.is_view_open(view_id).await;
if !is_open {
let database_view_editor = create_view_editor(database_editor.clone()).await?;
database_editor.open_view_editor(database_view_editor).await;
}
Ok(database_editor)
},
}
}
#[tracing::instrument(level = "trace", skip(self, pool), err)]
async fn make_database_rev_editor(
&self,
view_id: &str,
pool: Arc<ConnectionPool>,
) -> Result<Arc<DatabaseEditor>, FlowyError> {
let user = self.database_user.clone();
let (base_view_pad, base_view_rev_manager) =
make_database_view_revision_pad(view_id, user.clone()).await?;
let mut database_id = base_view_pad.database_id.clone();
tracing::debug!("Open database: {} with view: {}", database_id, view_id);
if database_id.is_empty() {
// Before the database_id concept was introduced, we used the view_id directly. So if
// the database_id is empty, we can fall back to the view_id. Since version 0.1.1,
// we use the database_id, which enables binding different views to the same database.
database_id = view_id.to_owned();
}
let token = user.token()?;
let cloud = Arc::new(DatabaseRevisionCloudService::new(token));
let mut rev_manager = self.make_database_rev_manager(&database_id, pool.clone())?;
let database_pad = Arc::new(RwLock::new(
rev_manager
.initialize::<DatabaseRevisionSerde>(Some(cloud))
.await?,
));
let database_editor = DatabaseEditor::new(
&database_id,
user,
database_pad,
rev_manager,
self.block_indexer.clone(),
self.database_refs.clone(),
self.task_scheduler.clone(),
)
.await?;
let base_view_editor = DatabaseViewEditor::from_pad(
database_editor.database_view_data.clone(),
database_editor.cell_data_cache.clone(),
base_view_rev_manager,
base_view_pad,
)
.await?;
database_editor.open_view_editor(base_view_editor).await;
Ok(database_editor)
}
#[tracing::instrument(level = "trace", skip(self, pool), err)]
pub fn make_database_rev_manager(
&self,
database_id: &str,
pool: Arc<ConnectionPool>,
) -> FlowyResult<RevisionManager<Arc<ConnectionPool>>> {
// Create revision persistence
let disk_cache = SQLiteDatabaseRevisionPersistence::new(pool.clone());
let configuration = RevisionPersistenceConfiguration::new(6, false);
let rev_persistence = RevisionPersistence::new(database_id, disk_cache, configuration);
// Create snapshot persistence
const DATABASE_SP_PREFIX: &str = "grid";
let snapshot_object_id = format!("{}:{}", DATABASE_SP_PREFIX, database_id);
let snapshot_persistence =
SQLiteDatabaseRevisionSnapshotPersistence::new(&snapshot_object_id, pool);
let rev_compress = DatabaseRevisionMergeable();
let rev_manager = RevisionManager::new(
database_id,
rev_persistence,
rev_compress,
snapshot_persistence,
);
Ok(rev_manager)
}
}
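// A hedged lifecycle sketch, assuming a database_manager handle and a
// hypothetical view id.
async fn open_close_sketch(database_manager: Arc<DatabaseManager>) -> FlowyResult<()> {
// Opening caches the editor keyed by database_id, so another view bound to
// the same database reuses the cached editor.
let _editor = database_manager.open_database_view("view_1").await?;
// Closing the last referencing view disposes of the editor and evicts it.
database_manager.close_database_view("view_1").await?;
Ok(())
}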
pub async fn link_existing_database(
view_id: &str,
name: String,
database_id: &str,
layout: DatabaseLayoutPB,
database_manager: Arc<DatabaseManager>,
) -> FlowyResult<()> {
tracing::trace!(
"Link database view: {} with database: {}",
view_id,
database_id
);
let database_view_rev = DatabaseViewRevision::new(
database_id.to_string(),
view_id.to_owned(),
false,
name.clone(),
layout.into(),
);
let database_view_ops = make_database_view_operations(&database_view_rev);
let database_view_bytes = database_view_ops.json_bytes();
let revision = Revision::initial_revision(view_id, database_view_bytes);
database_manager
.create_database_view(view_id, vec![revision])
.await?;
let _ = database_manager
.database_refs
.bind(database_id, view_id, false, &name);
Ok(())
}
pub async fn create_new_database(
view_id: &str,
name: String,
layout: DatabaseLayoutPB,
database_manager: Arc<DatabaseManager>,
build_context: BuildDatabaseContext,
) -> FlowyResult<()> {
let BuildDatabaseContext {
field_revs,
block_metas,
blocks,
database_view_data,
layout_setting,
} = build_context;
for block_meta_data in &blocks {
let block_id = &block_meta_data.block_id;
// Indexing the block's rows
block_meta_data.rows.iter().for_each(|row| {
let _ = database_manager
.block_indexer
.insert(&row.block_id, &row.id);
});
// Create database's block
let database_block_ops = make_database_block_operations(block_meta_data);
let database_block_bytes = database_block_ops.json_bytes();
let revision = Revision::initial_revision(block_id, database_block_bytes);
database_manager
.create_database_block(&block_id, vec![revision])
.await?;
}
let database_id = gen_database_id();
let database_rev = DatabaseRevision::from_build_context(&database_id, field_revs, block_metas);
// Create database
tracing::trace!("Create new database: {}", database_id);
let database_ops = make_database_operations(&database_rev);
let database_bytes = database_ops.json_bytes();
let revision = Revision::initial_revision(&database_id, database_bytes);
database_manager
.create_database(&database_id, &view_id, &name, vec![revision])
.await?;
// Create database view
tracing::trace!("Create new database view: {}", view_id);
let database_view = if database_view_data.is_empty() {
let mut database_view =
DatabaseViewRevision::new(database_id, view_id.to_owned(), true, name, layout.into());
database_view.layout_settings = layout_setting;
database_view
} else {
let mut database_view = DatabaseViewRevision::from_json(database_view_data)?;
database_view.database_id = database_id;
// Replace the view id with the new one. This logic will be removed in the future.
database_view.view_id = view_id.to_owned();
database_view
};
let database_view_ops = make_database_view_operations(&database_view);
let database_view_bytes = database_view_ops.json_bytes();
let revision = Revision::initial_revision(view_id, database_view_bytes);
database_manager
.create_database_view(view_id, vec![revision])
.await?;
Ok(())
}
impl DatabaseRefIndexerQuery for DatabaseRefs {
fn get_ref_views(&self, database_id: &str) -> FlowyResult<Vec<DatabaseViewRef>> {
self.get_ref_views_with_database(database_id)
}
}
impl DatabaseRefIndexerQuery for Arc<DatabaseRefs> {
fn get_ref_views(&self, database_id: &str) -> FlowyResult<Vec<DatabaseViewRef>> {
(**self).get_ref_views(database_id)
}
}

View File

@ -1,55 +0,0 @@
use flowy_derive::ProtoBuf_Enum;
use flowy_notification::NotificationBuilder;
const OBSERVABLE_CATEGORY: &str = "Grid";
#[derive(ProtoBuf_Enum, Debug)]
pub enum DatabaseNotification {
Unknown = 0,
/// Trigger after inserting/deleting/updating a row
DidUpdateViewRows = 20,
/// Trigger when the visibility of a row changes. For example, updating a filter will trigger this notification
DidUpdateViewRowsVisibility = 21,
/// Trigger after inserting/deleting/updating a field
DidUpdateFields = 22,
/// Trigger after editing a cell
DidUpdateCell = 40,
/// Trigger after editing a field's properties, including renaming it, updating its type option, etc.
DidUpdateField = 50,
/// Trigger after the number of groups is changed
DidUpdateGroups = 60,
/// Trigger after inserting/deleting/updating/moving a row
DidUpdateGroupRow = 61,
/// Trigger when setting a new grouping field
DidGroupByField = 62,
/// Trigger after inserting/deleting/updating a filter
DidUpdateFilter = 63,
/// Trigger after inserting/deleting/updating a sort
DidUpdateSort = 64,
/// Trigger after the sort configurations are changed
DidReorderRows = 65,
/// Trigger after editing a row that matches the sort rule
DidReorderSingleRow = 66,
/// Trigger when the settings of the database are changed
DidUpdateSettings = 70,
/// Trigger when the layout setting of the database is updated
DidUpdateLayoutSettings = 80,
/// Trigger when the layout field of the database is changed
DidSetNewLayoutField = 81,
}
impl std::default::Default for DatabaseNotification {
fn default() -> Self {
DatabaseNotification::Unknown
}
}
impl std::convert::From<DatabaseNotification> for i32 {
fn from(notification: DatabaseNotification) -> Self {
notification as i32
}
}
#[tracing::instrument(level = "trace")]
pub fn send_notification(id: &str, ty: DatabaseNotification) -> NotificationBuilder {
NotificationBuilder::new(id, ty, OBSERVABLE_CATEGORY)
}
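// A hedged sketch of the helper in use; the row id is hypothetical, and the
// send() finisher is assumed from flowy_notification's NotificationBuilder.
fn notify_cell_update_sketch(row_id: &str) {
// Builds a "Grid"-category notification targeted at the given row id.
send_notification(row_id, DatabaseNotification::DidUpdateCell).send();
}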

View File

@ -1,128 +0,0 @@
use parking_lot::RwLock;
use std::any::{type_name, Any};
use std::collections::HashMap;
use crate::services::filter::FilterType;
use std::fmt::Debug;
use std::hash::Hash;
use std::sync::Arc;
pub type AtomicCellDataCache = Arc<RwLock<AnyTypeCache<u64>>>;
pub type AtomicCellFilterCache = Arc<RwLock<AnyTypeCache<FilterType>>>;
#[derive(Default, Debug)]
pub struct AnyTypeCache<TypeValueKey>(HashMap<TypeValueKey, TypeValue>);
impl<TypeValueKey> AnyTypeCache<TypeValueKey>
where
TypeValueKey: Clone + Hash + Eq,
{
pub fn new() -> Arc<RwLock<AnyTypeCache<TypeValueKey>>> {
Arc::new(RwLock::new(AnyTypeCache(HashMap::default())))
}
pub fn insert<T>(&mut self, key: &TypeValueKey, val: T) -> Option<T>
where
T: 'static + Send + Sync,
{
self
.0
.insert(key.clone(), TypeValue::new(val))
.and_then(downcast_owned)
}
pub fn remove(&mut self, key: &TypeValueKey) {
self.0.remove(key);
}
// pub fn remove<T, K: AsRef<TypeValueKey>>(&mut self, key: K) -> Option<T>
// where
// T: 'static + Send + Sync,
// {
// self.0.remove(key.as_ref()).and_then(downcast_owned)
// }
pub fn get<T>(&self, key: &TypeValueKey) -> Option<&T>
where
T: 'static + Send + Sync,
{
self
.0
.get(key)
.and_then(|type_value| type_value.boxed.downcast_ref())
}
pub fn get_mut<T>(&mut self, key: &TypeValueKey) -> Option<&mut T>
where
T: 'static + Send + Sync,
{
self
.0
.get_mut(key)
.and_then(|type_value| type_value.boxed.downcast_mut())
}
pub fn contains(&self, key: &TypeValueKey) -> bool {
self.0.contains_key(key)
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
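// A short sketch of the cache in use with the u64-keyed alias defined above.
fn any_type_cache_sketch() {
let cache: AtomicCellDataCache = AnyTypeCache::<u64>::new();
// Values of any Send + Sync type can be stored; reads downcast by type.
cache.write().insert(&1, "hello".to_string());
let value = cache.read().get::<String>(&1).cloned();
assert_eq!(value.as_deref(), Some("hello"));
// Asking for the wrong type yields None rather than panicking.
assert!(cache.read().get::<usize>(&1).is_none());
}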
fn downcast_owned<T: 'static + Send + Sync>(type_value: TypeValue) -> Option<T> {
type_value.boxed.downcast().ok().map(|boxed| *boxed)
}
#[derive(Debug)]
struct TypeValue {
boxed: Box<dyn Any + Send + Sync + 'static>,
#[allow(dead_code)]
ty: &'static str,
}
impl TypeValue {
pub fn new<T>(value: T) -> Self
where
T: Send + Sync + 'static,
{
Self {
boxed: Box::new(value),
ty: type_name::<T>(),
}
}
}
impl std::ops::Deref for TypeValue {
type Target = Box<dyn Any + Send + Sync + 'static>;
fn deref(&self) -> &Self::Target {
&self.boxed
}
}
impl std::ops::DerefMut for TypeValue {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.boxed
}
}
// #[cfg(test)]
// mod tests {
// use crate::services::cell::CellDataCache;
//
// #[test]
// fn test() {
// let mut ext = CellDataCache::new();
// ext.insert("1", "a".to_string());
// ext.insert("2", 2);
//
// let a: &String = ext.get("1").unwrap();
// assert_eq!(a, "a");
//
// let a: Option<&usize> = ext.get("1");
// assert!(a.is_none());
// }
// }

View File

@ -1,346 +0,0 @@
use crate::entities::FieldType;
use crate::services::cell::{AtomicCellDataCache, CellProtobufBlob, TypeCellData};
use crate::services::field::*;
use crate::services::group::make_no_status_group;
use database_model::{CellRevision, FieldRevision};
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use std::fmt::Debug;
/// Decode the opaque cell data into readable format content
pub trait CellDataDecoder: TypeOption {
///
/// Tries to decode the opaque cell string into `decoded_field_type`'s cell data. Sometimes the `field_type`
/// of the `FieldRevision` is not equal to the `decoded_field_type` (this happens when switching
/// the field type of the `FieldRevision` to another field type). In that case the cell data needs
/// some transformation.
///
/// For example, suppose the current field type of the `FieldRevision` is a checkbox. When switching the field
/// type from checkbox to single select, two new options, `Yes` and `No`, are created if they don't exist.
/// But the data of the cell doesn't change, and we can't iterate over all the rows to transform the cell
/// data into a form the current field type can parse. One approach is to transform the cell data
/// when it gets read. For the moment, the cell data is a string, `Yes` or `No`; it is compared
/// with the option's name, and if they match, the id of the option is returned.
fn decode_cell_str(
&self,
cell_str: String,
decoded_field_type: &FieldType,
field_rev: &FieldRevision,
) -> FlowyResult<<Self as TypeOption>::CellData>;
/// Same as `decode_cell_str`, but decodes the cell data into a readable `String`.
/// For example, the string of a Multi-Select cell will be a list of the options' names
/// separated by commas.
fn decode_cell_data_to_str(&self, cell_data: <Self as TypeOption>::CellData) -> String;
}
pub trait CellDataChangeset: TypeOption {
/// The changeset can be parsed into the concrete data struct if `TypeOption::CellChangeset`
/// implements the `FromCellChangesetString` trait.
/// For example, the SelectOptionCellChangeset, DateCellChangeset, etc.
///
fn apply_changeset(
&self,
changeset: <Self as TypeOption>::CellChangeset,
type_cell_data: Option<TypeCellData>,
) -> FlowyResult<(String, <Self as TypeOption>::CellData)>;
}
/// changeset: It will be deserialized into specific data based on the FieldType.
/// For example,
/// FieldType::RichText => String
/// FieldType::SingleSelect => SelectOptionChangeset
///
/// cell_rev: It will be None if the cell does not contain any data.
pub fn apply_cell_data_changeset<C: ToCellChangesetString, T: AsRef<FieldRevision>>(
changeset: C,
cell_rev: Option<CellRevision>,
field_rev: T,
cell_data_cache: Option<AtomicCellDataCache>,
) -> Result<String, FlowyError> {
let field_rev = field_rev.as_ref();
let changeset = changeset.to_cell_changeset_str();
let field_type: FieldType = field_rev.ty.into();
let type_cell_data = cell_rev.and_then(|cell_rev| match TypeCellData::try_from(cell_rev) {
Ok(type_cell_data) => Some(type_cell_data),
Err(_) => None,
});
let cell_str = match TypeOptionCellExt::new_with_cell_data_cache(field_rev, cell_data_cache)
.get_type_option_cell_data_handler(&field_type)
{
None => "".to_string(),
Some(handler) => handler.handle_cell_changeset(changeset, type_cell_data, field_rev)?,
};
Ok(TypeCellData::new(cell_str, field_type).to_json())
}
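// A hedged illustration of the value returned above: the handler's output
// string is wrapped together with the field type (see `TypeCellData` later in
// this diff) so the original `FieldType` survives later field-type switches.
// The exact `field_type` encoding depends on `FieldType`'s serde impl.
#[allow(dead_code)]
fn storage_envelope_sketch() {
  let cell_str = "Yes".to_string(); // whatever the handler produced
  let json = TypeCellData::new(cell_str, FieldType::Checkbox).to_json();
  // `json` looks roughly like: {"data":"Yes","field_type":...}
  tracing::trace!("persisted cell: {}", json);
}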
pub fn get_type_cell_protobuf<T: TryInto<TypeCellData, Error = FlowyError> + Debug>(
data: T,
field_rev: &FieldRevision,
cell_data_cache: Option<AtomicCellDataCache>,
) -> (FieldType, CellProtobufBlob) {
let to_field_type = field_rev.ty.into();
match data.try_into() {
Ok(type_cell_data) => {
let TypeCellData {
cell_str,
field_type,
} = type_cell_data;
match try_decode_cell_str_to_cell_protobuf(
cell_str,
&field_type,
&to_field_type,
field_rev,
cell_data_cache,
) {
Ok(cell_bytes) => (field_type, cell_bytes),
Err(e) => {
tracing::error!("Decode cell data failed, {:?}", e);
(field_type, CellProtobufBlob::default())
},
}
},
Err(_err) => {
// It's okay to ignore this error, because it's acceptable for the current cell to be
// unable to display the existing cell data. For example, the UI of a text cell will be
// blank if the cell's data type is Number.
(to_field_type, CellProtobufBlob::default())
},
}
}
pub fn get_type_cell_data<CellData, Output>(
data: CellData,
field_rev: &FieldRevision,
cell_data_cache: Option<AtomicCellDataCache>,
) -> Option<Output>
where
CellData: TryInto<TypeCellData, Error = FlowyError> + Debug,
Output: Default + 'static,
{
let to_field_type = field_rev.ty.into();
match data.try_into() {
Ok(type_cell_data) => {
let TypeCellData {
cell_str,
field_type,
} = type_cell_data;
try_decode_cell_str_to_cell_data(
cell_str,
&field_type,
&to_field_type,
field_rev,
cell_data_cache,
)
},
Err(_err) => None,
}
}
/// Decode the opaque cell data from one field type to another using the corresponding `TypeOption`
///
/// The cell data might become an empty string, depending on whether the `to_field_type`'s
/// `TypeOption` supports transforming the `from_field_type`'s cell data.
///
/// # Arguments
///
/// * `cell_str`: the opaque cell string that can be decoded by corresponding structs that implement the
/// `FromCellString` trait.
/// * `from_field_type`: the original field type of the passed-in cell data. Check the `TypeCellData`
/// that is used to save the origin field type of the cell data.
/// * `to_field_type`: decode the passed-in cell data to this field type. It will use the to_field_type's
/// TypeOption to decode this cell data.
/// * `field_rev`: used to get the corresponding TypeOption for the specified field type.
///
/// returns: CellProtobufBlob
///
pub fn try_decode_cell_str_to_cell_protobuf(
cell_str: String,
from_field_type: &FieldType,
to_field_type: &FieldType,
field_rev: &FieldRevision,
cell_data_cache: Option<AtomicCellDataCache>,
) -> FlowyResult<CellProtobufBlob> {
match TypeOptionCellExt::new_with_cell_data_cache(field_rev, cell_data_cache)
.get_type_option_cell_data_handler(to_field_type)
{
None => Ok(CellProtobufBlob::default()),
Some(handler) => handler.handle_cell_str(cell_str, from_field_type, field_rev),
}
}
pub fn try_decode_cell_str_to_cell_data<T: Default + 'static>(
cell_str: String,
from_field_type: &FieldType,
to_field_type: &FieldType,
field_rev: &FieldRevision,
cell_data_cache: Option<AtomicCellDataCache>,
) -> Option<T> {
let handler = TypeOptionCellExt::new_with_cell_data_cache(field_rev, cell_data_cache)
.get_type_option_cell_data_handler(to_field_type)?;
handler
.get_cell_data(cell_str, from_field_type, field_rev)
.ok()?
.unbox_or_none::<T>()
}
/// Returns a string that represents the current field_type's cell data.
/// For example, the string of a Multi-Select cell will be a list of the option names
/// separated by commas.
///
/// # Arguments
///
/// * `cell_str`: the opaque cell string that can be decoded by corresponding structs that implement the
/// `FromCellString` trait.
/// * `decoded_field_type`: the field_type of the cell_str
/// * `field_type`: use this field type's `TypeOption` to stringify this cell_str
/// * `field_rev`: used to get the corresponding TypeOption for the specified field type.
///
/// returns: String
pub fn stringify_cell_data(
cell_str: String,
decoded_field_type: &FieldType,
field_type: &FieldType,
field_rev: &FieldRevision,
) -> String {
match TypeOptionCellExt::new_with_cell_data_cache(field_rev, None)
.get_type_option_cell_data_handler(field_type)
{
None => "".to_string(),
Some(handler) => handler.stringify_cell_str(cell_str, decoded_field_type, field_rev),
}
}
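// Standalone sketch of the multi-select case mentioned above: the readable
// form of the cell is simply the selected option names joined by commas, e.g.
// stringify_multi_select_sketch(&["todo", "doing"]) == "todo,doing".
#[allow(dead_code)]
fn stringify_multi_select_sketch(option_names: &[&str]) -> String {
  option_names.join(",")
}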
pub fn insert_text_cell(s: String, field_rev: &FieldRevision) -> CellRevision {
let data = apply_cell_data_changeset(s, None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn insert_number_cell(num: i64, field_rev: &FieldRevision) -> CellRevision {
let data = apply_cell_data_changeset(num.to_string(), None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn insert_url_cell(url: String, field_rev: &FieldRevision) -> CellRevision {
// Check whether the url equals the id of the no-status group: everywhere
// except in the group of rows with an empty url, the group id equals the url,
// so when the url equals the no-status group's id it should be replaced with
// an empty string.
let no_status_group_id = make_no_status_group(field_rev).id;
let url = match url {
a if a == no_status_group_id => "".to_owned(),
_ => url,
};
let data = apply_cell_data_changeset(url, None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn insert_checkbox_cell(is_check: bool, field_rev: &FieldRevision) -> CellRevision {
let s = if is_check {
CHECK.to_string()
} else {
UNCHECK.to_string()
};
let data = apply_cell_data_changeset(s, None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn insert_date_cell(date_cell_data: DateCellData, field_rev: &FieldRevision) -> CellRevision {
let cell_data = serde_json::to_string(&DateCellChangeset {
date: date_cell_data.timestamp.map(|t| t.to_string()),
time: None,
include_time: Some(date_cell_data.include_time),
is_utc: true,
})
.unwrap();
let data = apply_cell_data_changeset(cell_data, None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn insert_select_option_cell(
option_ids: Vec<String>,
field_rev: &FieldRevision,
) -> CellRevision {
let changeset =
SelectOptionCellChangeset::from_insert_options(option_ids).to_cell_changeset_str();
let data = apply_cell_data_changeset(changeset, None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn delete_select_option_cell(
option_ids: Vec<String>,
field_rev: &FieldRevision,
) -> CellRevision {
let changeset =
SelectOptionCellChangeset::from_delete_options(option_ids).to_cell_changeset_str();
let data = apply_cell_data_changeset(changeset, None, field_rev, None).unwrap();
CellRevision::new(data)
}
/// Deserialize the String into cell specific data type.
pub trait FromCellString {
fn from_cell_str(s: &str) -> FlowyResult<Self>
where
Self: Sized;
}
/// If the changeset applying to the cell is not String type, it should impl this trait.
/// Deserialize the string into cell specific changeset.
pub trait FromCellChangesetString {
fn from_changeset(changeset: String) -> FlowyResult<Self>
where
Self: Sized;
}
impl FromCellChangesetString for String {
fn from_changeset(changeset: String) -> FlowyResult<Self>
where
Self: Sized,
{
Ok(changeset)
}
}
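// A hedged sketch of how a non-String changeset could implement
// `FromCellChangesetString` via serde_json, mirroring what concrete changesets
// such as `SelectOptionCellChangeset` are described to do. The struct below is
// hypothetical and not part of the codebase.
#[derive(serde::Deserialize, Debug)]
#[allow(dead_code)]
struct ToggleCellChangeset {
  is_checked: bool,
}

impl FromCellChangesetString for ToggleCellChangeset {
  fn from_changeset(changeset: String) -> FlowyResult<Self> {
    serde_json::from_str(&changeset)
      .map_err(|err| FlowyError::internal().context(format!("{}", err)))
  }
}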
pub trait ToCellChangesetString: Debug {
fn to_cell_changeset_str(&self) -> String;
}
impl ToCellChangesetString for String {
fn to_cell_changeset_str(&self) -> String {
self.clone()
}
}
pub struct AnyCellChangeset<T>(pub Option<T>);
impl<T> AnyCellChangeset<T> {
pub fn try_into_inner(self) -> FlowyResult<T> {
match self.0 {
None => Err(ErrorCode::InvalidData.into()),
Some(data) => Ok(data),
}
}
}
impl<T, C: ToString> std::convert::From<C> for AnyCellChangeset<T>
where
T: FromCellChangesetString,
{
fn from(changeset: C) -> Self {
match T::from_changeset(changeset.to_string()) {
Ok(data) => AnyCellChangeset(Some(data)),
Err(e) => {
tracing::error!("Deserialize CellDataChangeset failed: {}", e);
AnyCellChangeset(None)
},
}
}
}
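// Usage sketch: any string-like changeset converts into `AnyCellChangeset<T>`
// through `T::from_changeset`; a parse failure yields `AnyCellChangeset(None)`,
// which `try_into_inner` surfaces as an `InvalidData` error.
#[allow(dead_code)]
fn any_cell_changeset_sketch() -> FlowyResult<()> {
  let changeset: AnyCellChangeset<String> = "new text".into();
  let text = changeset.try_into_inner()?;
  debug_assert_eq!(text, "new text");
  Ok(())
}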
// impl std::convert::From<String> for AnyCellChangeset<String> {
// fn from(s: String) -> Self {
// AnyCellChangeset(Some(s))
// }
// }

View File

@ -1,7 +0,0 @@
mod cell_data_cache;
mod cell_operation;
mod type_cell_data;
pub use cell_data_cache::*;
pub use cell_operation::*;
pub use type_cell_data::*;

View File

@ -1,209 +0,0 @@
use crate::entities::FieldType;
use bytes::Bytes;
use database_model::CellRevision;
use flowy_error::{internal_error, FlowyError, FlowyResult};
use serde::{Deserialize, Serialize};
/// TypeCellData is a generic cell data wrapper; the type_cell_data can be parsed according to its
/// field_type. The `data` is encoded in JSON format. You can use `IntoCellData` to decode the opaque
/// data into a concrete cell type:
/// TypeCellData -> IntoCellData<T> -> T
///
/// `TypeCellData` is the same as the cell data that was saved to disk, except that it carries the
/// field_type. The field_type indicates the cell data's original `FieldType`, and it changes when
/// the current field's type is switched from one to another.
///
#[derive(Debug, Serialize, Deserialize)]
pub struct TypeCellData {
#[serde(rename = "data")]
pub cell_str: String,
pub field_type: FieldType,
}
impl TypeCellData {
pub fn from_field_type(field_type: &FieldType) -> TypeCellData {
Self {
cell_str: "".to_string(),
field_type: field_type.clone(),
}
}
pub fn from_json_str(s: &str) -> FlowyResult<Self> {
let type_cell_data: TypeCellData = serde_json::from_str(s).map_err(|err| {
let msg = format!("Failed to deserialize {} into TypeCellData: {}", s, err);
FlowyError::internal().context(msg)
})?;
Ok(type_cell_data)
}
pub fn into_inner(self) -> String {
self.cell_str
}
}
impl std::convert::TryFrom<String> for TypeCellData {
type Error = FlowyError;
fn try_from(value: String) -> Result<Self, Self::Error> {
TypeCellData::from_json_str(&value)
}
}
impl ToString for TypeCellData {
fn to_string(&self) -> String {
self.cell_str.clone()
}
}
impl std::convert::TryFrom<&CellRevision> for TypeCellData {
type Error = FlowyError;
fn try_from(value: &CellRevision) -> Result<Self, Self::Error> {
Self::from_json_str(&value.type_cell_data)
}
}
impl std::convert::TryFrom<CellRevision> for TypeCellData {
type Error = FlowyError;
fn try_from(value: CellRevision) -> Result<Self, Self::Error> {
Self::try_from(&value)
}
}
impl TypeCellData {
pub fn new(cell_str: String, field_type: FieldType) -> Self {
TypeCellData {
cell_str,
field_type,
}
}
pub fn to_json(&self) -> String {
serde_json::to_string(self).unwrap_or_else(|_| "".to_owned())
}
pub fn is_number(&self) -> bool {
self.field_type == FieldType::Number
}
pub fn is_text(&self) -> bool {
self.field_type == FieldType::RichText
}
pub fn is_checkbox(&self) -> bool {
self.field_type == FieldType::Checkbox
}
pub fn is_date(&self) -> bool {
self.field_type == FieldType::DateTime
}
pub fn is_single_select(&self) -> bool {
self.field_type == FieldType::SingleSelect
}
pub fn is_multi_select(&self) -> bool {
self.field_type == FieldType::MultiSelect
}
pub fn is_checklist(&self) -> bool {
self.field_type == FieldType::Checklist
}
pub fn is_url(&self) -> bool {
self.field_type == FieldType::URL
}
pub fn is_select_option(&self) -> bool {
self.field_type == FieldType::MultiSelect || self.field_type == FieldType::SingleSelect
}
}
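// Roundtrip sketch for the JSON envelope above, assuming `FieldType`'s serde
// encoding is stable: serialize with `to_json`, parse it back with
// `from_json_str`, and inspect the result through the helpers.
#[allow(dead_code)]
fn type_cell_data_roundtrip_sketch() -> FlowyResult<()> {
  let data = TypeCellData::new("Yes".to_string(), FieldType::Checkbox);
  let json = data.to_json();
  let parsed = TypeCellData::from_json_str(&json)?;
  debug_assert!(parsed.is_checkbox());
  debug_assert_eq!(parsed.into_inner(), "Yes");
  Ok(())
}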
/// The data is encoded with protobuf or UTF-8. You should choose the corresponding decoder struct to parse it.
///
/// For example:
///
/// * Use DateCellDataPB to parse the data when the FieldType is Date.
/// * Use URLCellDataPB to parse the data when the FieldType is URL.
/// * Use String to parse the data when the FieldType is RichText, Number, or Checkbox.
/// * Check out the implementation of the CellDataOperation trait for more information.
#[derive(Default, Debug)]
pub struct CellProtobufBlob(pub Bytes);
pub trait DecodedCellData {
type Object;
fn is_empty(&self) -> bool;
}
pub trait CellProtobufBlobParser {
type Object: DecodedCellData;
fn parser(bytes: &Bytes) -> FlowyResult<Self::Object>;
}
pub trait CellStringParser {
type Object;
fn parser_cell_str(&self, s: &str) -> Option<Self::Object>;
}
pub trait CellBytesCustomParser {
type Object;
fn parse(&self, bytes: &Bytes) -> FlowyResult<Self::Object>;
}
impl CellProtobufBlob {
pub fn new<T: AsRef<[u8]>>(data: T) -> Self {
let bytes = Bytes::from(data.as_ref().to_vec());
Self(bytes)
}
pub fn from<T: TryInto<Bytes>>(bytes: T) -> FlowyResult<Self>
where
<T as TryInto<Bytes>>::Error: std::fmt::Debug,
{
let bytes = bytes.try_into().map_err(internal_error)?;
Ok(Self(bytes))
}
pub fn parser<P>(&self) -> FlowyResult<P::Object>
where
P: CellProtobufBlobParser,
{
P::parser(&self.0)
}
pub fn custom_parser<P>(&self, parser: P) -> FlowyResult<P::Object>
where
P: CellBytesCustomParser,
{
parser.parse(&self.0)
}
// pub fn parse<'a, T: TryFrom<&'a [u8]>>(&'a self) -> FlowyResult<T>
// where
// <T as TryFrom<&'a [u8]>>::Error: std::fmt::Debug,
// {
// T::try_from(self.0.as_ref()).map_err(internal_error)
// }
}
impl ToString for CellProtobufBlob {
fn to_string(&self) -> String {
match String::from_utf8(self.0.to_vec()) {
Ok(s) => s,
Err(e) => {
tracing::error!("DecodedCellData to string failed: {:?}", e);
"".to_string()
},
}
}
}
impl std::ops::Deref for CellProtobufBlob {
type Target = Bytes;
fn deref(&self) -> &Self::Target {
&self.0
}
}
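// Sketch of a `CellBytesCustomParser` that decodes the blob as UTF-8,
// mirroring the `ToString` impl above; the parser type is hypothetical.
// Usage: `CellProtobufBlob::new("hello").custom_parser(Utf8BlobParser)`
// yields `Ok("hello".to_string())`.
#[allow(dead_code)]
struct Utf8BlobParser;

impl CellBytesCustomParser for Utf8BlobParser {
  type Object = String;
  fn parse(&self, bytes: &Bytes) -> FlowyResult<String> {
    String::from_utf8(bytes.to_vec()).map_err(internal_error)
  }
}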

View File

@ -1,234 +0,0 @@
use crate::services::database::retry::GetRowDataRetryAction;
use bytes::Bytes;
use database_model::{CellRevision, DatabaseBlockRevision, RowChangeset, RowRevision};
use flowy_client_sync::client_database::{
DatabaseBlockRevisionChangeset, DatabaseBlockRevisionPad,
};
use flowy_client_sync::make_operations_from_revisions;
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{
RevisionCloudService, RevisionManager, RevisionMergeable, RevisionObjectDeserializer,
RevisionObjectSerializer,
};
use flowy_sqlite::ConnectionPool;
use lib_infra::future::FutureResult;
use lib_infra::retry::spawn_retry;
use lib_ot::core::EmptyAttributes;
use revision_model::Revision;
use std::borrow::Cow;
use std::sync::Arc;
use tokio::sync::RwLock;
pub struct DatabaseBlockEditor {
pub block_id: String,
pad: Arc<RwLock<DatabaseBlockRevisionPad>>,
rev_manager: Arc<RevisionManager<Arc<ConnectionPool>>>,
}
impl DatabaseBlockEditor {
pub async fn new(
token: &str,
block_id: &str,
mut rev_manager: RevisionManager<Arc<ConnectionPool>>,
) -> FlowyResult<Self> {
let cloud = Arc::new(DatabaseBlockRevisionCloudService {
token: token.to_owned(),
});
let block_revision_pad = rev_manager
.initialize::<DatabaseBlockRevisionSerde>(Some(cloud))
.await?;
let pad = Arc::new(RwLock::new(block_revision_pad));
let rev_manager = Arc::new(rev_manager);
let block_id = block_id.to_owned();
Ok(Self {
block_id,
pad,
rev_manager,
})
}
pub async fn close(&self) {
self.rev_manager.generate_snapshot().await;
self.rev_manager.close().await;
}
pub async fn duplicate_block(&self, duplicated_block_id: &str) -> DatabaseBlockRevision {
self.pad.read().await.duplicate_data(duplicated_block_id)
}
/// Creates a row after the row with `prev_row_id`. If `prev_row_id` is None, the row will be appended to the end of the list
pub(crate) async fn create_row(
&self,
row: RowRevision,
prev_row_id: Option<String>,
) -> FlowyResult<(i32, Option<i32>)> {
let mut row_count = 0;
let mut row_index = None;
self
.modify(|block_pad| {
if let Some(start_row_id) = prev_row_id.as_ref() {
match block_pad.index_of_row(start_row_id) {
None => {},
Some(index) => row_index = Some(index as i32 + 1),
}
}
let change = block_pad.add_row_rev(row, prev_row_id)?;
row_count = block_pad.number_of_rows();
if row_index.is_none() {
row_index = Some(row_count - 1);
}
Ok(change)
})
.await?;
Ok((row_count, row_index))
}
pub async fn delete_rows(&self, ids: Vec<Cow<'_, String>>) -> FlowyResult<i32> {
let mut row_count = 0;
self
.modify(|block_pad| {
let changeset = block_pad.delete_rows(ids)?;
row_count = block_pad.number_of_rows();
Ok(changeset)
})
.await?;
Ok(row_count)
}
pub async fn update_row(&self, changeset: RowChangeset) -> FlowyResult<()> {
self
.modify(|block_pad| Ok(block_pad.update_row(changeset)?))
.await?;
Ok(())
}
pub async fn move_row(&self, row_id: &str, from: usize, to: usize) -> FlowyResult<()> {
self
.modify(|block_pad| Ok(block_pad.move_row(row_id, from, to)?))
.await?;
Ok(())
}
pub async fn index_of_row(&self, row_id: &str) -> Option<usize> {
self.pad.read().await.index_of_row(row_id)
}
pub async fn number_of_rows(&self) -> i32 {
self.pad.read().await.rows.len() as i32
}
pub async fn get_row_rev(&self, row_id: &str) -> FlowyResult<Option<(usize, Arc<RowRevision>)>> {
if let Ok(pad) = self.pad.try_read() {
Ok(pad.get_row_rev(row_id))
} else {
let retry = GetRowDataRetryAction {
row_id: row_id.to_owned(),
pad: self.pad.clone(),
};
match spawn_retry(3, 300, retry).await {
Ok(value) => Ok(value),
Err(_) => {
tracing::error!("Required database block read lock failed");
Ok(None)
},
}
}
}
pub async fn get_row_revs<T>(
&self,
row_ids: Option<Vec<Cow<'_, T>>>,
) -> FlowyResult<Vec<Arc<RowRevision>>>
where
T: AsRef<str> + ToOwned + ?Sized,
{
let row_revs = self.pad.read().await.get_row_revs(row_ids)?;
Ok(row_revs)
}
pub async fn get_cell_revs(
&self,
field_id: &str,
row_ids: Option<Vec<Cow<'_, String>>>,
) -> FlowyResult<Vec<CellRevision>> {
let cell_revs = self.pad.read().await.get_cell_revs(field_id, row_ids)?;
Ok(cell_revs)
}
async fn modify<F>(&self, f: F) -> FlowyResult<()>
where
F: for<'a> FnOnce(
&'a mut DatabaseBlockRevisionPad,
) -> FlowyResult<Option<DatabaseBlockRevisionChangeset>>,
{
let mut write_guard = self.pad.write().await;
let changeset = f(&mut write_guard)?;
match changeset {
None => {},
Some(changeset) => {
self.apply_change(changeset).await?;
},
}
Ok(())
}
async fn apply_change(&self, change: DatabaseBlockRevisionChangeset) -> FlowyResult<()> {
let DatabaseBlockRevisionChangeset {
operations: delta,
md5,
} = change;
let data = delta.json_bytes();
let _ = self.rev_manager.add_local_revision(data, md5).await?;
Ok(())
}
}
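// Standalone sketch of the try_read-then-retry pattern used in `get_row_rev`
// above: attempt a non-blocking read first, and fall back to a bounded retry
// loop under contention. `spawn_retry(3, 300, retry)` is assumed, from its
// usage, to mean 3 attempts spaced 300ms apart.
#[allow(dead_code)]
async fn retry_until_some<T, F>(mut attempt: F, times: usize, interval_ms: u64) -> Option<T>
where
  F: FnMut() -> Option<T>,
{
  for _ in 0..times {
    if let Some(value) = attempt() {
      return Some(value);
    }
    tokio::time::sleep(std::time::Duration::from_millis(interval_ms)).await;
  }
  None
}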
struct DatabaseBlockRevisionCloudService {
#[allow(dead_code)]
token: String,
}
impl RevisionCloudService for DatabaseBlockRevisionCloudService {
#[tracing::instrument(level = "trace", skip(self))]
fn fetch_object(
&self,
_user_id: &str,
_object_id: &str,
) -> FutureResult<Vec<Revision>, FlowyError> {
FutureResult::new(async move { Ok(vec![]) })
}
}
struct DatabaseBlockRevisionSerde();
impl RevisionObjectDeserializer for DatabaseBlockRevisionSerde {
type Output = DatabaseBlockRevisionPad;
fn deserialize_revisions(
_object_id: &str,
revisions: Vec<Revision>,
) -> FlowyResult<Self::Output> {
let pad = DatabaseBlockRevisionPad::from_revisions(revisions)?;
Ok(pad)
}
fn recover_from_revisions(_revisions: Vec<Revision>) -> Option<(Self::Output, i64)> {
None
}
}
impl RevisionObjectSerializer for DatabaseBlockRevisionSerde {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
}
pub struct DatabaseBlockRevisionMergeable();
impl RevisionMergeable for DatabaseBlockRevisionMergeable {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
DatabaseBlockRevisionSerde::combine_revisions(revisions)
}
}

View File

@ -1,353 +0,0 @@
use crate::entities::{CellChangesetPB, InsertedRowPB, UpdatedRowPB};
use crate::manager::DatabaseUser;
use crate::notification::{send_notification, DatabaseNotification};
use crate::services::database::{DatabaseBlockEditor, DatabaseBlockRevisionMergeable};
use crate::services::persistence::block_index::BlockRowIndexer;
use crate::services::persistence::rev_sqlite::{
SQLiteDatabaseBlockRevisionPersistence, SQLiteDatabaseRevisionSnapshotPersistence,
};
use crate::services::row::{make_row_from_row_rev, DatabaseBlockRow, DatabaseBlockRowRevision};
use dashmap::DashMap;
use database_model::{
DatabaseBlockMetaRevision, DatabaseBlockMetaRevisionChangeset, RowChangeset, RowRevision,
};
use flowy_error::FlowyResult;
use flowy_revision::{RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration};
use flowy_sqlite::ConnectionPool;
use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::broadcast;
#[derive(Debug, Clone)]
pub enum DatabaseBlockEvent {
InsertRow {
block_id: String,
row: InsertedRowPB,
},
UpdateRow {
block_id: String,
row: UpdatedRowPB,
},
DeleteRow {
block_id: String,
row_id: String,
},
Move {
block_id: String,
deleted_row_id: String,
inserted_row: InsertedRowPB,
},
}
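// A minimal listener sketch (not from the diff): `DatabaseBlocks` below fans
// these events out over a tokio broadcast channel, so a consumer simply loops
// on `recv` and matches on the event variants.
#[allow(dead_code)]
async fn listen_block_events(mut rx: broadcast::Receiver<DatabaseBlockEvent>) {
  while let Ok(event) = rx.recv().await {
    match event {
      DatabaseBlockEvent::InsertRow { block_id, .. } => {
        tracing::trace!("row inserted into block {}", block_id);
      },
      DatabaseBlockEvent::DeleteRow { block_id, row_id } => {
        tracing::trace!("row {} deleted from block {}", row_id, block_id);
      },
      _ => {},
    }
  }
}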
type BlockId = String;
pub(crate) struct DatabaseBlocks {
user: Arc<dyn DatabaseUser>,
persistence: Arc<BlockRowIndexer>,
block_editors: DashMap<BlockId, Arc<DatabaseBlockEditor>>,
event_notifier: broadcast::Sender<DatabaseBlockEvent>,
}
impl DatabaseBlocks {
pub(crate) async fn new(
user: &Arc<dyn DatabaseUser>,
block_meta_revs: Vec<Arc<DatabaseBlockMetaRevision>>,
persistence: Arc<BlockRowIndexer>,
event_notifier: broadcast::Sender<DatabaseBlockEvent>,
) -> FlowyResult<Self> {
let block_editors = make_block_editors(user, block_meta_revs).await?;
let user = user.clone();
let manager = Self {
user,
block_editors,
persistence,
event_notifier,
};
Ok(manager)
}
pub async fn close(&self) {
for block_editor in self.block_editors.iter() {
block_editor.close().await;
}
}
// #[tracing::instrument(level = "trace", skip(self))]
pub(crate) async fn get_or_create_block_editor(
&self,
block_id: &str,
) -> FlowyResult<Arc<DatabaseBlockEditor>> {
debug_assert!(!block_id.is_empty());
match self.block_editors.get(block_id) {
None => {
tracing::error!(
"This is a fatal error, block with id:{} is not exist",
block_id
);
let editor = Arc::new(make_database_block_editor(&self.user, block_id).await?);
self
.block_editors
.insert(block_id.to_owned(), editor.clone());
Ok(editor)
},
Some(editor) => Ok(editor.clone()),
}
}
pub(crate) async fn get_editor_from_row_id(
&self,
row_id: &str,
) -> FlowyResult<Arc<DatabaseBlockEditor>> {
let block_id = self.persistence.get_block_id(row_id)?;
self.get_or_create_block_editor(&block_id).await
}
#[tracing::instrument(level = "trace", skip(self, start_row_id), err)]
pub(crate) async fn create_row(
&self,
row_rev: RowRevision,
start_row_id: Option<String>,
) -> FlowyResult<i32> {
let block_id = row_rev.block_id.clone();
self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
let editor = self.get_or_create_block_editor(&row_rev.block_id).await?;
let mut row = InsertedRowPB::from(&row_rev);
let (number_of_rows, index) = editor.create_row(row_rev, start_row_id).await?;
row.index = index;
let _ = self
.event_notifier
.send(DatabaseBlockEvent::InsertRow { block_id, row });
Ok(number_of_rows)
}
pub(crate) async fn insert_row(
&self,
rows_by_block_id: HashMap<String, Vec<RowRevision>>,
) -> FlowyResult<Vec<DatabaseBlockMetaRevisionChangeset>> {
let mut changesets = vec![];
for (block_id, row_revs) in rows_by_block_id {
let editor = self.get_or_create_block_editor(&block_id).await?;
for row_rev in row_revs {
self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
let mut row = InsertedRowPB::from(&row_rev);
row.index = editor.create_row(row_rev, None).await?.1;
let _ = self.event_notifier.send(DatabaseBlockEvent::InsertRow {
block_id: block_id.clone(),
row,
});
}
changesets.push(DatabaseBlockMetaRevisionChangeset::from_row_count(
block_id.clone(),
editor.number_of_rows().await,
));
}
Ok(changesets)
}
pub async fn update_row(&self, changeset: RowChangeset) -> FlowyResult<()> {
let editor = self.get_editor_from_row_id(&changeset.row_id).await?;
editor.update_row(changeset.clone()).await?;
match editor.get_row_rev(&changeset.row_id).await? {
None => tracing::error!(
"Update row failed, can't find the row with id: {}",
changeset.row_id
),
Some((_, row_rev)) => {
let changed_field_ids = changeset
.cell_by_field_id
.keys()
.cloned()
.collect::<Vec<String>>();
let row = UpdatedRowPB {
row: make_row_from_row_rev(row_rev),
field_ids: changed_field_ids,
};
let _ = self.event_notifier.send(DatabaseBlockEvent::UpdateRow {
block_id: editor.block_id.clone(),
row,
});
},
}
Ok(())
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub async fn delete_row(&self, row_id: &str) -> FlowyResult<Option<Arc<RowRevision>>> {
let row_id = row_id.to_owned();
let block_id = self.persistence.get_block_id(&row_id)?;
let editor = self.get_or_create_block_editor(&block_id).await?;
match editor.get_row_rev(&row_id).await? {
None => Ok(None),
Some((_, row_rev)) => {
let _ = editor.delete_rows(vec![Cow::Borrowed(&row_id)]).await?;
let _ = self.event_notifier.send(DatabaseBlockEvent::DeleteRow {
block_id: editor.block_id.clone(),
row_id: row_rev.id.clone(),
});
Ok(Some(row_rev))
},
}
}
pub(crate) async fn delete_rows(
&self,
block_rows: Vec<DatabaseBlockRow>,
) -> FlowyResult<Vec<DatabaseBlockMetaRevisionChangeset>> {
let mut changesets = vec![];
for block_row in block_rows {
let editor = self.get_or_create_block_editor(&block_row.block_id).await?;
let row_ids = block_row
.row_ids
.into_iter()
.map(Cow::Owned)
.collect::<Vec<Cow<String>>>();
let row_count = editor.delete_rows(row_ids).await?;
let changeset =
DatabaseBlockMetaRevisionChangeset::from_row_count(block_row.block_id, row_count);
changesets.push(changeset);
}
Ok(changesets)
}
// This function will be moved to GridViewRevisionEditor
pub(crate) async fn move_row(
&self,
row_rev: Arc<RowRevision>,
from: usize,
to: usize,
) -> FlowyResult<()> {
let editor = self.get_editor_from_row_id(&row_rev.id).await?;
editor.move_row(&row_rev.id, from, to).await?;
let delete_row_id = row_rev.id.clone();
let insert_row = InsertedRowPB {
index: Some(to as i32),
row: make_row_from_row_rev(row_rev),
is_new: false,
};
let _ = self.event_notifier.send(DatabaseBlockEvent::Move {
block_id: editor.block_id.clone(),
deleted_row_id: delete_row_id,
inserted_row: insert_row,
});
Ok(())
}
// This function will be moved to GridViewRevisionEditor.
pub async fn index_of_row(&self, row_id: &str) -> Option<usize> {
match self.get_editor_from_row_id(row_id).await {
Ok(editor) => editor.index_of_row(row_id).await,
Err(_) => None,
}
}
pub async fn update_cell(&self, changeset: CellChangesetPB) -> FlowyResult<()> {
let row_changeset: RowChangeset = changeset.clone().into();
self.update_row(row_changeset).await?;
self.notify_did_update_cell(changeset).await?;
Ok(())
}
pub async fn get_row_rev(&self, row_id: &str) -> FlowyResult<Option<(usize, Arc<RowRevision>)>> {
let editor = self.get_editor_from_row_id(row_id).await?;
editor.get_row_rev(row_id).await
}
#[allow(dead_code)]
pub async fn get_row_revs(&self) -> FlowyResult<Vec<Arc<RowRevision>>> {
let mut row_revs = vec![];
for iter in self.block_editors.iter() {
let editor = iter.value();
row_revs.extend(editor.get_row_revs::<&str>(None).await?);
}
Ok(row_revs)
}
pub(crate) async fn get_blocks(
&self,
block_ids: Option<Vec<String>>,
) -> FlowyResult<Vec<DatabaseBlockRowRevision>> {
let mut blocks = vec![];
match block_ids {
None => {
for iter in self.block_editors.iter() {
let editor = iter.value();
let block_id = editor.block_id.clone();
let row_revs = editor.get_row_revs::<&str>(None).await?;
blocks.push(DatabaseBlockRowRevision { block_id, row_revs });
}
},
Some(block_ids) => {
for block_id in block_ids {
let editor = self.get_or_create_block_editor(&block_id).await?;
let row_revs = editor.get_row_revs::<&str>(None).await?;
blocks.push(DatabaseBlockRowRevision { block_id, row_revs });
}
},
}
Ok(blocks)
}
async fn notify_did_update_cell(&self, changeset: CellChangesetPB) -> FlowyResult<()> {
let id = format!("{}:{}", changeset.row_id, changeset.field_id);
send_notification(&id, DatabaseNotification::DidUpdateCell).send();
Ok(())
}
}
/// Initialize each block editor
async fn make_block_editors(
user: &Arc<dyn DatabaseUser>,
block_meta_revs: Vec<Arc<DatabaseBlockMetaRevision>>,
) -> FlowyResult<DashMap<String, Arc<DatabaseBlockEditor>>> {
let editor_map = DashMap::new();
for block_meta_rev in block_meta_revs {
let editor = make_database_block_editor(user, &block_meta_rev.block_id).await?;
editor_map.insert(block_meta_rev.block_id.clone(), Arc::new(editor));
}
Ok(editor_map)
}
async fn make_database_block_editor(
user: &Arc<dyn DatabaseUser>,
block_id: &str,
) -> FlowyResult<DatabaseBlockEditor> {
tracing::trace!("Open block:{} editor", block_id);
let token = user.token()?;
let rev_manager = make_database_block_rev_manager(user, block_id)?;
DatabaseBlockEditor::new(&token, block_id, rev_manager).await
}
pub fn make_database_block_rev_manager(
user: &Arc<dyn DatabaseUser>,
block_id: &str,
) -> FlowyResult<RevisionManager<Arc<ConnectionPool>>> {
// Create revision persistence
let pool = user.db_pool()?;
let disk_cache = SQLiteDatabaseBlockRevisionPersistence::new(pool.clone());
let configuration = RevisionPersistenceConfiguration::new(4, false);
let rev_persistence = RevisionPersistence::new(block_id, disk_cache, configuration);
// Create snapshot persistence
const DATABASE_BLOCK_SP_PREFIX: &str = "grid_block";
let snapshot_object_id = format!("{}:{}", DATABASE_BLOCK_SP_PREFIX, block_id);
let snapshot_persistence =
SQLiteDatabaseRevisionSnapshotPersistence::new(&snapshot_object_id, pool);
let rev_compress = DatabaseBlockRevisionMergeable();
let rev_manager = RevisionManager::new(
block_id,
rev_persistence,
rev_compress,
snapshot_persistence,
);
Ok(rev_manager)
}

Some files were not shown because too many files have changed in this diff.