chore: add sqlite feature flag (#1683)

* chore: add sqlite feature flag

* chore: fix clippy warnings

* ci: fix clippy

* chore: add rev file persistence

* ci: fix clippy

Co-authored-by: nathan <nathan@appflowy.io>
This commit is contained in:
Nathan.fooo 2023-01-12 13:09:08 +08:00 committed by GitHub
parent 860c5d100b
commit 6a36bcd31d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
74 changed files with 281 additions and 226 deletions

View File

@ -66,19 +66,19 @@ jobs:
working-directory: frontend/rust-lib/ working-directory: frontend/rust-lib/
- name: clippy rust-lib - name: clippy rust-lib
run: cargo clippy --no-default-features run: cargo clippy --features="rev-sqlite"
working-directory: frontend/rust-lib working-directory: frontend/rust-lib
- name: Run rust-lib tests - name: Run rust-lib tests
working-directory: frontend/rust-lib working-directory: frontend/rust-lib
run: RUST_LOG=info cargo test --no-default-features --features="sync" run: RUST_LOG=info cargo test --no-default-features --features="sync,rev-sqlite"
- name: rustfmt shared-lib - name: rustfmt shared-lib
run: cargo fmt --all -- --check run: cargo fmt --all -- --check
working-directory: shared-lib working-directory: shared-lib
- name: clippy shared-lib - name: clippy shared-lib
run: cargo clippy --no-default-features run: cargo clippy -- -D warnings
working-directory: shared-lib working-directory: shared-lib
- name: Run shared-lib tests - name: Run shared-lib tests

View File

@ -23,7 +23,7 @@ CARGO_MAKE_CRATE_FS_NAME = "dart_ffi"
CARGO_MAKE_CRATE_NAME = "dart-ffi" CARGO_MAKE_CRATE_NAME = "dart-ffi"
LIB_NAME = "dart_ffi" LIB_NAME = "dart_ffi"
CURRENT_APP_VERSION = "0.0.9.1" CURRENT_APP_VERSION = "0.0.9.1"
FEATURES = "flutter" FLUTTER_DESKTOP_FEATURES = "flutter,rev-sqlite"
PRODUCT_NAME = "AppFlowy" PRODUCT_NAME = "AppFlowy"
# CRATE_TYPE: https://doc.rust-lang.org/reference/linkage.html # CRATE_TYPE: https://doc.rust-lang.org/reference/linkage.html
# If you update the macOS's CRATE_TYPE, don't forget to update the # If you update the macOS's CRATE_TYPE, don't forget to update the
@ -150,7 +150,7 @@ script = ['''
echo BUILD_FLAG: ${BUILD_FLAG} echo BUILD_FLAG: ${BUILD_FLAG}
echo TARGET_OS: ${TARGET_OS} echo TARGET_OS: ${TARGET_OS}
echo RUST_COMPILE_TARGET: ${RUST_COMPILE_TARGET} echo RUST_COMPILE_TARGET: ${RUST_COMPILE_TARGET}
echo FEATURES: ${FEATURES} echo FEATURES: ${FLUTTER_DESKTOP_FEATURES}
echo PRODUCT_EXT: ${PRODUCT_EXT} echo PRODUCT_EXT: ${PRODUCT_EXT}
echo APP_ENVIRONMENT: ${APP_ENVIRONMENT} echo APP_ENVIRONMENT: ${APP_ENVIRONMENT}
echo ${platforms} echo ${platforms}

View File

@ -884,8 +884,12 @@ dependencies = [
"diesel", "diesel",
"diesel_derives", "diesel_derives",
"diesel_migrations", "diesel_migrations",
"error-chain",
"lazy_static", "lazy_static",
"lib-sqlite", "libsqlite3-sys",
"openssl",
"r2d2",
"scheduled-thread-pool",
"tracing", "tracing",
] ]
@ -928,6 +932,7 @@ dependencies = [
"flowy-error", "flowy-error",
"flowy-http-model", "flowy-http-model",
"flowy-revision", "flowy-revision",
"flowy-revision-persistence",
"flowy-sync", "flowy-sync",
"flowy-test", "flowy-test",
"futures", "futures",
@ -964,7 +969,6 @@ dependencies = [
"http-flowy", "http-flowy",
"lib-dispatch", "lib-dispatch",
"lib-ot", "lib-ot",
"lib-sqlite",
"protobuf", "protobuf",
"r2d2", "r2d2",
"serde_json", "serde_json",
@ -987,6 +991,7 @@ dependencies = [
"flowy-folder", "flowy-folder",
"flowy-http-model", "flowy-http-model",
"flowy-revision", "flowy-revision",
"flowy-revision-persistence",
"flowy-sync", "flowy-sync",
"flowy-test", "flowy-test",
"folder-rev-model", "folder-rev-model",
@ -1029,6 +1034,7 @@ dependencies = [
"flowy-grid", "flowy-grid",
"flowy-http-model", "flowy-http-model",
"flowy-revision", "flowy-revision",
"flowy-revision-persistence",
"flowy-sync", "flowy-sync",
"flowy-task", "flowy-task",
"flowy-test", "flowy-test",
@ -1116,6 +1122,7 @@ dependencies = [
"flowy-error", "flowy-error",
"flowy-http-model", "flowy-http-model",
"flowy-revision", "flowy-revision",
"flowy-revision-persistence",
"futures", "futures",
"futures-util", "futures-util",
"lib-infra", "lib-infra",
@ -1130,6 +1137,14 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "flowy-revision-persistence"
version = "0.1.0"
dependencies = [
"flowy-error",
"flowy-http-model",
]
[[package]] [[package]]
name = "flowy-sync" name = "flowy-sync"
version = "0.1.0" version = "0.1.0"
@ -1831,22 +1846,6 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "lib-sqlite"
version = "0.1.0"
dependencies = [
"diesel",
"diesel_derives",
"diesel_migrations",
"error-chain",
"lazy_static",
"libsqlite3-sys",
"log",
"openssl",
"r2d2",
"scheduled-thread-pool",
]
[[package]] [[package]]
name = "lib-ws" name = "lib-ws"
version = "0.1.0" version = "0.1.0"

View File

@ -2,7 +2,6 @@
members = [ members = [
"lib-dispatch", "lib-dispatch",
"lib-log", "lib-log",
"lib-sqlite",
"flowy-net", "flowy-net",
"flowy-core", "flowy-core",
"dart-ffi", "dart-ffi",
@ -14,6 +13,7 @@ members = [
"flowy-document", "flowy-document",
"flowy-error", "flowy-error",
"flowy-revision", "flowy-revision",
"flowy-revision-persistence",
"flowy-grid", "flowy-grid",
"flowy-task", "flowy-task",
"flowy-sync", "flowy-sync",

View File

@ -31,8 +31,9 @@ dart-notify = { path = "../dart-notify" }
flowy-derive = { path = "../flowy-derive" } flowy-derive = { path = "../flowy-derive" }
[features] [features]
default = ["flowy-core/dart", "flutter"] default = ["flowy-core/dart", "flutter", "rev-sqlite"]
flutter = [] flutter = []
rev-sqlite = ["flowy-core/rev-sqlite"]
http_sync = ["flowy-core/http_sync", "flowy-core/use_bunyan"] http_sync = ["flowy-core/http_sync", "flowy-core/use_bunyan"]
openssl_vendored = ["flowy-core/openssl_vendored"] openssl_vendored = ["flowy-core/openssl_vendored"]

View File

@ -13,7 +13,7 @@ flowy-net = { path = "../flowy-net" }
flowy-folder = { path = "../flowy-folder", default-features = false } flowy-folder = { path = "../flowy-folder", default-features = false }
flowy-grid = { path = "../flowy-grid", default-features = false } flowy-grid = { path = "../flowy-grid", default-features = false }
grid-rev-model = { path = "../../../shared-lib/grid-rev-model" } grid-rev-model = { path = "../../../shared-lib/grid-rev-model" }
flowy-database = { path = "../flowy-database" } flowy-database = { path = "../flowy-database", optional = true }
flowy-document = { path = "../flowy-document", default-features = false } flowy-document = { path = "../flowy-document", default-features = false }
flowy-revision = { path = "../flowy-revision" } flowy-revision = { path = "../flowy-revision" }
flowy-task = { path = "../flowy-task" } flowy-task = { path = "../flowy-task" }
@ -39,4 +39,11 @@ dart = [
"flowy-grid/dart", "flowy-grid/dart",
"flowy-document/dart", "flowy-document/dart",
] ]
rev-sqlite = [
"flowy-database",
"flowy-user/rev-sqlite",
"flowy-folder/rev-sqlite",
"flowy-grid/rev-sqlite",
"flowy-document/rev-sqlite",
]
openssl_vendored = ["flowy-database/openssl_vendored"] openssl_vendored = ["flowy-database/openssl_vendored"]

View File

@ -79,7 +79,7 @@ impl FlowySDKConfig {
fn crate_log_filter(level: String) -> String { fn crate_log_filter(level: String) -> String {
let level = std::env::var("RUST_LOG").unwrap_or(level); let level = std::env::var("RUST_LOG").unwrap_or(level);
let mut filters = vec![]; let mut filters = vec![];
filters.push(format!("flowy_sdk={}", level)); filters.push(format!("flowy_core={}", level));
filters.push(format!("flowy_folder={}", level)); filters.push(format!("flowy_folder={}", level));
filters.push(format!("flowy_user={}", level)); filters.push(format!("flowy_user={}", level));
filters.push(format!("flowy_document={}", level)); filters.push(format!("flowy_document={}", level));
@ -95,7 +95,7 @@ fn crate_log_filter(level: String) -> String {
// filters.push(format!("lib_dispatch={}", level)); // filters.push(format!("lib_dispatch={}", level));
filters.push(format!("dart_ffi={}", "info")); filters.push(format!("dart_ffi={}", "info"));
filters.push(format!("flowy_database={}", level)); filters.push(format!("flowy_database={}", "info"));
filters.push(format!("flowy_net={}", "info")); filters.push(format!("flowy_net={}", "info"));
filters.join(",") filters.join(",")
} }

View File

@ -9,9 +9,14 @@ edition = "2018"
diesel = { version = "1.4.8", features = ["sqlite"] } diesel = { version = "1.4.8", features = ["sqlite"] }
diesel_derives = { version = "1.4.1", features = ["sqlite"] } diesel_derives = { version = "1.4.1", features = ["sqlite"] }
diesel_migrations = { version = "1.4.0", features = ["sqlite"] } diesel_migrations = { version = "1.4.0", features = ["sqlite"] }
lib-sqlite = { path = "../lib-sqlite" }
tracing = { version = "0.1", features = ["log"] } tracing = { version = "0.1", features = ["log"] }
lazy_static = "1.4.0" lazy_static = "1.4.0"
r2d2 = "0.8.9"
libsqlite3-sys = { version = ">=0.8.0, <0.24.0", features = ["bundled"] }
scheduled-thread-pool = "0.2.5"
error-chain = "=0.12.0"
openssl = { version = "0.10.38", optional = true }
[features] [features]
openssl_vendored = ["lib-sqlite/openssl_vendored"] openssl_vendored = ["openssl/vendored"]

View File

@ -1,8 +1,8 @@
use crate::kv::schema::{kv_table, kv_table::dsl, KV_SQL}; use crate::kv::schema::{kv_table, kv_table::dsl, KV_SQL};
use crate::sqlite::{DBConnection, Database, PoolConfig};
use ::diesel::{query_dsl::*, ExpressionMethods}; use ::diesel::{query_dsl::*, ExpressionMethods};
use diesel::{Connection, SqliteConnection}; use diesel::{Connection, SqliteConnection};
use lazy_static::lazy_static; use lazy_static::lazy_static;
use lib_sqlite::{DBConnection, Database, PoolConfig};
use std::{path::Path, sync::RwLock}; use std::{path::Path, sync::RwLock};
macro_rules! impl_get_func { macro_rules! impl_get_func {

View File

@ -3,13 +3,16 @@ pub use diesel_derives::*;
use diesel_migrations::*; use diesel_migrations::*;
use std::{fmt::Debug, io, path::Path}; use std::{fmt::Debug, io, path::Path};
pub mod kv; pub mod kv;
mod sqlite;
use crate::sqlite::PoolConfig;
pub use crate::sqlite::{ConnectionPool, DBConnection, Database};
use lib_sqlite::PoolConfig;
pub use lib_sqlite::{ConnectionPool, DBConnection, Database};
pub mod schema; pub mod schema;
#[macro_use] #[macro_use]
pub mod macros; pub mod macros;
#[macro_use] #[macro_use]
extern crate diesel; extern crate diesel;
#[macro_use] #[macro_use]
@ -41,7 +44,7 @@ pub fn init(storage_path: &str) -> Result<Database, io::Error> {
fn as_io_error<E>(e: E) -> io::Error fn as_io_error<E>(e: E) -> io::Error
where where
E: Into<lib_sqlite::Error> + Debug, E: Into<crate::sqlite::Error> + Debug,
{ {
let msg = format!("{:?}", e); let msg = format!("{:?}", e);
io::Error::new(io::ErrorKind::NotConnected, msg) io::Error::new(io::ErrorKind::NotConnected, msg)

View File

@ -1,4 +1,4 @@
use crate::errors::*; use crate::sqlite::errors::*;
use diesel::{dsl::sql, expression::SqlLiteral, query_dsl::LoadQuery, Connection, RunQueryDsl, SqliteConnection}; use diesel::{dsl::sql, expression::SqlLiteral, query_dsl::LoadQuery, Connection, RunQueryDsl, SqliteConnection};
pub trait ConnectionExtension: Connection { pub trait ConnectionExtension: Connection {

View File

@ -1,4 +1,4 @@
use crate::{ use crate::sqlite::{
errors::*, errors::*,
pool::{ConnectionManager, ConnectionPool, PoolConfig}, pool::{ConnectionManager, ConnectionPool, PoolConfig},
}; };
@ -17,7 +17,7 @@ impl Database {
let uri = db_file_uri(dir, name); let uri = db_file_uri(dir, name);
if !std::path::PathBuf::from(dir).exists() { if !std::path::PathBuf::from(dir).exists() {
log::error!("Create database failed. {} not exists", &dir); tracing::error!("Create database failed. {} not exists", &dir);
} }
let pool = ConnectionPool::new(pool_config, &uri)?; let pool = ConnectionPool::new(pool_config, &uri)?;

View File

@ -1,4 +1,4 @@
use crate::{errors::*, pragma::*}; use crate::sqlite::{errors::*, pragma::*};
use diesel::{connection::Connection, SqliteConnection}; use diesel::{connection::Connection, SqliteConnection};
use r2d2::{CustomizeConnection, ManageConnection, Pool}; use r2d2::{CustomizeConnection, ManageConnection, Pool};
use scheduled_thread_pool::ScheduledThreadPool; use scheduled_thread_pool::ScheduledThreadPool;
@ -45,6 +45,7 @@ impl ConnectionPool {
} }
} }
#[allow(dead_code)]
pub type OnExecFunc = Box<dyn Fn() -> Box<dyn Fn(&SqliteConnection, &str)> + Send + Sync>; pub type OnExecFunc = Box<dyn Fn() -> Box<dyn Fn(&SqliteConnection, &str)> + Send + Sync>;
pub struct PoolConfig { pub struct PoolConfig {
@ -85,7 +86,7 @@ pub struct ConnectionManager {
impl ManageConnection for ConnectionManager { impl ManageConnection for ConnectionManager {
type Connection = SqliteConnection; type Connection = SqliteConnection;
type Error = crate::Error; type Error = crate::sqlite::Error;
fn connect(&self) -> Result<Self::Connection> { fn connect(&self) -> Result<Self::Connection> {
Ok(SqliteConnection::establish(&self.db_uri)?) Ok(SqliteConnection::establish(&self.db_uri)?)
@ -140,7 +141,7 @@ impl DatabaseCustomizer {
} }
} }
impl CustomizeConnection<SqliteConnection, crate::Error> for DatabaseCustomizer { impl CustomizeConnection<SqliteConnection, crate::sqlite::Error> for DatabaseCustomizer {
fn on_acquire(&self, conn: &mut SqliteConnection) -> Result<()> { fn on_acquire(&self, conn: &mut SqliteConnection) -> Result<()> {
conn.pragma_set_busy_timeout(self.config.busy_timeout)?; conn.pragma_set_busy_timeout(self.config.busy_timeout)?;
if self.config.journal_mode != SQLiteJournalMode::WAL { if self.config.journal_mode != SQLiteJournalMode::WAL {

View File

@ -1,5 +1,5 @@
#![allow(clippy::upper_case_acronyms)] #![allow(clippy::upper_case_acronyms)]
use crate::errors::{Error, Result}; use crate::sqlite::errors::{Error, Result};
use diesel::{ use diesel::{
expression::SqlLiteral, expression::SqlLiteral,
query_dsl::load_dsl::LoadQuery, query_dsl::load_dsl::LoadQuery,
@ -7,7 +7,7 @@ use diesel::{
SqliteConnection, SqliteConnection,
}; };
use crate::conn_ext::ConnectionExtension; use crate::sqlite::conn_ext::ConnectionExtension;
use std::{ use std::{
convert::{TryFrom, TryInto}, convert::{TryFrom, TryInto},
fmt, fmt,
@ -20,7 +20,7 @@ pub trait PragmaExtension: ConnectionExtension {
Some(schema) => format!("PRAGMA {}.{} = '{}'", schema, key, val), Some(schema) => format!("PRAGMA {}.{} = '{}'", schema, key, val),
None => format!("PRAGMA {} = '{}'", key, val), None => format!("PRAGMA {} = '{}'", key, val),
}; };
log::trace!("SQLITE {}", query); tracing::trace!("SQLITE {}", query);
self.exec(&query)?; self.exec(&query)?;
Ok(()) Ok(())
} }
@ -33,7 +33,7 @@ pub trait PragmaExtension: ConnectionExtension {
Some(schema) => format!("PRAGMA {}.{} = '{}'", schema, key, val), Some(schema) => format!("PRAGMA {}.{} = '{}'", schema, key, val),
None => format!("PRAGMA {} = '{}'", key, val), None => format!("PRAGMA {} = '{}'", key, val),
}; };
log::trace!("SQLITE {}", query); tracing::trace!("SQLITE {}", query);
self.query::<ST, T>(&query) self.query::<ST, T>(&query)
} }
@ -45,7 +45,7 @@ pub trait PragmaExtension: ConnectionExtension {
Some(schema) => format!("PRAGMA {}.{}", schema, key), Some(schema) => format!("PRAGMA {}.{}", schema, key),
None => format!("PRAGMA {}", key), None => format!("PRAGMA {}", key),
}; };
log::trace!("SQLITE {}", query); tracing::trace!("SQLITE {}", query);
self.query::<ST, T>(&query) self.query::<ST, T>(&query)
} }

View File

@ -15,8 +15,9 @@ lib-ws = { path = "../../../shared-lib/lib-ws" }
lib-infra = { path = "../../../shared-lib/lib-infra" } lib-infra = { path = "../../../shared-lib/lib-infra" }
lib-dispatch = { path = "../lib-dispatch" } lib-dispatch = { path = "../lib-dispatch" }
flowy-database = { path = "../flowy-database" } flowy-database = { path = "../flowy-database", optional = true }
flowy-revision = { path = "../flowy-revision" } flowy-revision = { path = "../flowy-revision" }
flowy-revision-persistence = { path = "../flowy-revision-persistence" }
flowy-error = { path = "../flowy-error", features = ["collaboration", "ot", "http_server", "serde", "db"] } flowy-error = { path = "../flowy-error", features = ["collaboration", "ot", "http_server", "serde", "db"] }
dart-notify = { path = "../dart-notify" } dart-notify = { path = "../dart-notify" }
@ -58,5 +59,6 @@ flowy-codegen = { path = "../flowy-codegen"}
[features] [features]
sync = [] sync = []
cloud_sync = ["sync"] cloud_sync = ["sync"]
rev-sqlite = ["flowy-database"]
flowy_unit_test = ["lib-ot/flowy_unit_test", "flowy-revision/flowy_unit_test"] flowy_unit_test = ["lib-ot/flowy_unit_test", "flowy-revision/flowy_unit_test"]
dart = ["flowy-codegen/dart", "dart-notify/dart"] dart = ["flowy-codegen/dart", "dart-notify/dart"]

View File

@ -6,7 +6,7 @@ use flowy_database::kv::KV;
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
use flowy_http_model::revision::Revision; use flowy_http_model::revision::Revision;
use flowy_http_model::util::md5; use flowy_http_model::util::md5;
use flowy_revision::disk::{RevisionDiskCache, SyncRecord}; use flowy_revision_persistence::{RevisionDiskCache, SyncRecord};
use flowy_sync::util::make_operations_from_revisions; use flowy_sync::util::make_operations_from_revisions;
use std::sync::Arc; use std::sync::Arc;

View File

@ -11,7 +11,7 @@ use flowy_http_model::{
revision::{Revision, RevisionRange}, revision::{Revision, RevisionRange},
util::md5, util::md5,
}; };
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord}; use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
@ -25,7 +25,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> { fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
DeltaRevisionSql::create(revision_records, &*conn)?; DeltaRevisionSql::create(revision_records, &conn)?;
Ok(()) Ok(())
} }
@ -57,7 +57,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
let conn = &*self.pool.get().map_err(internal_error)?; let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets { for changeset in changesets {
let _ = DeltaRevisionSql::update(changeset, conn)?; DeltaRevisionSql::update(changeset, conn)?;
} }
Ok(()) Ok(())
})?; })?;
@ -78,8 +78,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
DeltaRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?; DeltaRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
DeltaRevisionSql::create(inserted_records, &*conn)?; DeltaRevisionSql::create(inserted_records, &conn)?;
Ok(()) Ok(())
}) })
} }

View File

@ -11,7 +11,7 @@ use flowy_http_model::{
revision::{Revision, RevisionRange}, revision::{Revision, RevisionRange},
util::md5, util::md5,
}; };
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord}; use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::sync::Arc; use std::sync::Arc;
pub struct SQLiteDocumentRevisionPersistence { pub struct SQLiteDocumentRevisionPersistence {
@ -24,7 +24,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> { fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
DocumentRevisionSql::create(revision_records, &*conn)?; DocumentRevisionSql::create(revision_records, &conn)?;
Ok(()) Ok(())
} }
@ -56,7 +56,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
let conn = &*self.pool.get().map_err(internal_error)?; let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets { for changeset in changesets {
let _ = DocumentRevisionSql::update(changeset, conn)?; DocumentRevisionSql::update(changeset, conn)?;
} }
Ok(()) Ok(())
})?; })?;
@ -77,8 +77,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
DocumentRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?; DocumentRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
DocumentRevisionSql::create(inserted_records, &*conn)?; DocumentRevisionSql::create(inserted_records, &conn)?;
Ok(()) Ok(())
}) })
} }

View File

@ -1,5 +1,5 @@
use crate::old_document::script::{EditorScript::*, *}; use crate::old_document::script::{EditorScript::*, *};
use flowy_revision::disk::RevisionState; use flowy_revision_persistence::RevisionState;
use lib_ot::core::{count_utf16_code_units, Interval}; use lib_ot::core::{count_utf16_code_units, Interval};
#[tokio::test] #[tokio::test]

View File

@ -1,6 +1,6 @@
use flowy_document::old_editor::editor::DeltaDocumentEditor; use flowy_document::old_editor::editor::DeltaDocumentEditor;
use flowy_document::TEXT_BLOCK_SYNC_INTERVAL_IN_MILLIS; use flowy_document::TEXT_BLOCK_SYNC_INTERVAL_IN_MILLIS;
use flowy_revision::disk::RevisionState; use flowy_revision_persistence::RevisionState;
use flowy_test::{helper::ViewTest, FlowySDKTest}; use flowy_test::{helper::ViewTest, FlowySDKTest};
use lib_ot::{core::Interval, text_delta::DeltaTextOperations}; use lib_ot::{core::Interval, text_delta::DeltaTextOperations};
use std::sync::Arc; use std::sync::Arc;

View File

@ -20,14 +20,13 @@ serde_json = {version = "1.0", optional = true}
http-flowy = { git = "https://github.com/AppFlowy-IO/AppFlowy-Server", optional = true} http-flowy = { git = "https://github.com/AppFlowy-IO/AppFlowy-Server", optional = true}
flowy-database = { path = "../flowy-database", optional = true} flowy-database = { path = "../flowy-database", optional = true}
r2d2 = { version = "0.8", optional = true} r2d2 = { version = "0.8", optional = true}
lib-sqlite = { path = "../lib-sqlite", optional = true }
[features] [features]
collaboration = ["flowy-sync"] collaboration = ["flowy-sync"]
ot = ["lib-ot"] ot = ["lib-ot"]
serde = ["serde_json"] serde = ["serde_json"]
http_server = ["http-flowy"] http_server = ["http-flowy"]
db = ["flowy-database", "lib-sqlite", "r2d2"] db = ["flowy-database", "r2d2"]
dart = ["flowy-codegen/dart"] dart = ["flowy-codegen/dart"]
[build-dependencies] [build-dependencies]

View File

@ -11,11 +11,3 @@ impl std::convert::From<::r2d2::Error> for FlowyError {
FlowyError::internal().context(error) FlowyError::internal().context(error)
} }
} }
// use diesel::result::{Error, DatabaseErrorKind};
// use lib_sqlite::ErrorKind;
impl std::convert::From<lib_sqlite::Error> for FlowyError {
fn from(error: lib_sqlite::Error) -> Self {
FlowyError::internal().context(error)
}
}

View File

@ -14,11 +14,12 @@ lib-ot = { path = "../../../shared-lib/lib-ot" }
lib-infra = { path = "../../../shared-lib/lib-infra" } lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-document = { path = "../flowy-document" } flowy-document = { path = "../flowy-document" }
flowy-database = { path = "../flowy-database" } flowy-database = { path = "../flowy-database", optional = true }
flowy-error = { path = "../flowy-error", features = ["db", "http_server"]} flowy-error = { path = "../flowy-error", features = ["db", "http_server"]}
dart-notify = { path = "../dart-notify" } dart-notify = { path = "../dart-notify" }
lib-dispatch = { path = "../lib-dispatch" } lib-dispatch = { path = "../lib-dispatch" }
flowy-revision = { path = "../flowy-revision" } flowy-revision = { path = "../flowy-revision" }
flowy-revision-persistence = { path = "../flowy-revision-persistence" }
parking_lot = "0.12.1" parking_lot = "0.12.1"
protobuf = {version = "2.18.0"} protobuf = {version = "2.18.0"}
@ -49,5 +50,6 @@ flowy-codegen = { path = "../flowy-codegen"}
default = [] default = []
sync = [] sync = []
cloud_sync = ["sync"] cloud_sync = ["sync"]
rev-sqlite = ["flowy-database", "flowy-folder/rev-sqlite"]
flowy_unit_test = ["lib-ot/flowy_unit_test", "flowy-revision/flowy_unit_test"] flowy_unit_test = ["lib-ot/flowy_unit_test", "flowy-revision/flowy_unit_test"]
dart = ["flowy-codegen/dart", "dart-notify/dart"] dart = ["flowy-codegen/dart", "dart-notify/dart"]

View File

@ -105,7 +105,7 @@ impl std::default::Default for TrashType {
} }
} }
#[derive(PartialEq, ProtoBuf, Default, Debug, Clone)] #[derive(PartialEq, Eq, ProtoBuf, Default, Debug, Clone)]
pub struct RepeatedTrashIdPB { pub struct RepeatedTrashIdPB {
#[pb(index = 1)] #[pb(index = 1)]
pub items: Vec<TrashIdPB>, pub items: Vec<TrashIdPB>,

View File

@ -52,8 +52,8 @@ impl AppController {
pub(crate) async fn create_app_on_local(&self, app: AppRevision) -> Result<AppPB, FlowyError> { pub(crate) async fn create_app_on_local(&self, app: AppRevision) -> Result<AppPB, FlowyError> {
self.persistence self.persistence
.begin_transaction(|transaction| { .begin_transaction(|transaction| {
let _ = transaction.create_app(app.clone())?; transaction.create_app(app.clone())?;
let _ = notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?; notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?;
Ok(()) Ok(())
}) })
.await?; .await?;
@ -101,9 +101,9 @@ impl AppController {
pub(crate) async fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> { pub(crate) async fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> {
self.persistence self.persistence
.begin_transaction(|transaction| { .begin_transaction(|transaction| {
let _ = transaction.move_app(app_id, from, to)?; transaction.move_app(app_id, from, to)?;
let app = transaction.read_app(app_id)?; let app = transaction.read_app(app_id)?;
let _ = notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?; notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?;
Ok(()) Ok(())
}) })
.await?; .await?;

View File

@ -3,6 +3,7 @@ pub mod rev_sqlite;
pub mod version_1; pub mod version_1;
mod version_2; mod version_2;
use crate::services::persistence::rev_sqlite::SQLiteFolderRevisionPersistence;
use crate::{ use crate::{
event_map::WorkspaceDatabase, event_map::WorkspaceDatabase,
manager::FolderId, manager::FolderId,
@ -11,12 +12,10 @@ use crate::{
use flowy_database::ConnectionPool; use flowy_database::ConnectionPool;
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use flowy_http_model::revision::Revision; use flowy_http_model::revision::Revision;
use flowy_revision::disk::{RevisionDiskCache, RevisionState, SyncRecord}; use flowy_revision_persistence::{RevisionDiskCache, RevisionState, SyncRecord};
use flowy_sync::client_folder::FolderPad; use flowy_sync::client_folder::FolderPad;
use folder_rev_model::{AppRevision, TrashRevision, ViewRevision, WorkspaceRevision};
use crate::services::persistence::rev_sqlite::SQLiteFolderRevisionPersistence;
use flowy_sync::server_folder::FolderOperationsBuilder; use flowy_sync::server_folder::FolderOperationsBuilder;
use folder_rev_model::{AppRevision, TrashRevision, ViewRevision, WorkspaceRevision};
use std::sync::Arc; use std::sync::Arc;
use tokio::sync::RwLock; use tokio::sync::RwLock;
pub use version_1::{app_sql::*, trash_sql::*, v1_impl::V1Transaction, view_sql::*, workspace_sql::*}; pub use version_1::{app_sql::*, trash_sql::*, v1_impl::V1Transaction, view_sql::*, workspace_sql::*};

View File

@ -9,7 +9,7 @@ use flowy_database::{
use flowy_error::{internal_error, FlowyError, FlowyResult}; use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange}; use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_http_model::util::md5; use flowy_http_model::util::md5;
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord}; use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::sync::Arc; use std::sync::Arc;
pub struct SQLiteFolderRevisionPersistence { pub struct SQLiteFolderRevisionPersistence {
@ -22,7 +22,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> { fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
FolderRevisionSql::create(revision_records, &*conn)?; FolderRevisionSql::create(revision_records, &conn)?;
Ok(()) Ok(())
} }
@ -54,7 +54,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
let conn = &*self.pool.get().map_err(internal_error)?; let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets { for changeset in changesets {
let _ = FolderRevisionSql::update(changeset, conn)?; FolderRevisionSql::update(changeset, conn)?;
} }
Ok(()) Ok(())
})?; })?;
@ -75,8 +75,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
FolderRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?; FolderRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
FolderRevisionSql::create(inserted_records, &*conn)?; FolderRevisionSql::create(inserted_records, &conn)?;
Ok(()) Ok(())
}) })
} }

View File

@ -1,3 +1,4 @@
#![allow(clippy::unused_unit)]
use bytes::Bytes; use bytes::Bytes;
use flowy_database::{ use flowy_database::{
prelude::*, prelude::*,

View File

@ -15,7 +15,7 @@ pub struct V1Transaction<'a>(pub &'a DBConnection);
impl<'a> FolderPersistenceTransaction for V1Transaction<'a> { impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> { fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
WorkspaceTableSql::create_workspace(user_id, workspace_rev, &*self.0)?; WorkspaceTableSql::create_workspace(user_id, workspace_rev, self.0)?;
Ok(()) Ok(())
} }
@ -34,12 +34,12 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
} }
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> { fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
AppTableSql::create_app(app_rev, &*self.0)?; AppTableSql::create_app(app_rev, self.0)?;
Ok(()) Ok(())
} }
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> { fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
AppTableSql::update_app(changeset, &*self.0)?; AppTableSql::update_app(changeset, self.0)?;
Ok(()) Ok(())
} }
@ -64,7 +64,7 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
} }
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> { fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
ViewTableSql::create_view(view_rev, &*self.0)?; ViewTableSql::create_view(view_rev, self.0)?;
Ok(()) Ok(())
} }
@ -80,13 +80,13 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
} }
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> { fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
ViewTableSql::update_view(changeset, &*self.0)?; ViewTableSql::update_view(changeset, self.0)?;
Ok(()) Ok(())
} }
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> { fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view_revision: ViewRevision = ViewTableSql::read_view(view_id, self.0)?.into(); let view_revision: ViewRevision = ViewTableSql::read_view(view_id, self.0)?.into();
ViewTableSql::delete_view(view_id, &*self.0)?; ViewTableSql::delete_view(view_id, self.0)?;
Ok(view_revision) Ok(view_revision)
} }
@ -95,7 +95,7 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
} }
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> { fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
TrashTableSql::create_trash(trashes, &*self.0)?; TrashTableSql::create_trash(trashes, self.0)?;
Ok(()) Ok(())
} }
@ -114,7 +114,7 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
None => TrashTableSql::delete_all(self.0), None => TrashTableSql::delete_all(self.0),
Some(trash_ids) => { Some(trash_ids) => {
for trash_id in &trash_ids { for trash_id in &trash_ids {
TrashTableSql::delete_trash(trash_id, &*self.0)?; TrashTableSql::delete_trash(trash_id, self.0)?;
} }
Ok(()) Ok(())
} }

View File

@ -168,7 +168,7 @@ impl TrashController {
self.persistence self.persistence
.begin_transaction(|transaction| { .begin_transaction(|transaction| {
let _ = transaction.create_trash(trash_revs.clone())?; transaction.create_trash(trash_revs.clone())?;
let _ = self.create_trash_on_server(trash_revs); let _ = self.create_trash_on_server(trash_revs);
notify_trash_changed(transaction.read_trash(None)?); notify_trash_changed(transaction.read_trash(None)?);

View File

@ -238,9 +238,9 @@ impl ViewController {
pub(crate) async fn move_view(&self, view_id: &str, from: usize, to: usize) -> Result<(), FlowyError> { pub(crate) async fn move_view(&self, view_id: &str, from: usize, to: usize) -> Result<(), FlowyError> {
self.persistence self.persistence
.begin_transaction(|transaction| { .begin_transaction(|transaction| {
let _ = transaction.move_view(view_id, from, to)?; transaction.move_view(view_id, from, to)?;
let view = transaction.read_view(view_id)?; let view = transaction.read_view(view_id)?;
let _ = notify_views_changed(&view.app_id, self.trash_controller.clone(), &transaction)?; notify_views_changed(&view.app_id, self.trash_controller.clone(), &transaction)?;
Ok(()) Ok(())
}) })
.await?; .await?;

View File

@ -122,7 +122,7 @@ fn read_workspaces_on_server(
for workspace_rev in &workspace_revs { for workspace_rev in &workspace_revs {
let m_workspace = workspace_rev.clone(); let m_workspace = workspace_rev.clone();
let app_revs = m_workspace.apps.clone(); let app_revs = m_workspace.apps.clone();
let _ = transaction.create_workspace(&user_id, m_workspace)?; transaction.create_workspace(&user_id, m_workspace)?;
tracing::trace!("Save {} apps", app_revs.len()); tracing::trace!("Save {} apps", app_revs.len());
for app_rev in app_revs { for app_rev in app_revs {
let view_revs = app_rev.belongings.clone(); let view_revs = app_rev.belongings.clone();

View File

@ -1,7 +1,7 @@
use crate::script::{invalid_workspace_name_test_case, FolderScript::*, FolderTest}; use crate::script::{invalid_workspace_name_test_case, FolderScript::*, FolderTest};
use flowy_folder::entities::view::ViewDataFormatPB; use flowy_folder::entities::view::ViewDataFormatPB;
use flowy_folder::entities::workspace::CreateWorkspacePayloadPB; use flowy_folder::entities::workspace::CreateWorkspacePayloadPB;
use flowy_revision::disk::RevisionState; use flowy_revision_persistence::RevisionState;
use flowy_test::{event_builder::*, FlowySDKTest}; use flowy_test::{event_builder::*, FlowySDKTest};
#[tokio::test] #[tokio::test]

View File

@ -15,8 +15,8 @@ use flowy_folder::entities::{
}; };
use flowy_folder::event_map::FolderEvent::*; use flowy_folder::event_map::FolderEvent::*;
use flowy_folder::{errors::ErrorCode, services::folder_editor::FolderEditor}; use flowy_folder::{errors::ErrorCode, services::folder_editor::FolderEditor};
use flowy_revision::disk::RevisionState;
use flowy_revision::REVISION_WRITE_INTERVAL_IN_MILLIS; use flowy_revision::REVISION_WRITE_INTERVAL_IN_MILLIS;
use flowy_revision_persistence::RevisionState;
use flowy_test::{event_builder::*, FlowySDKTest}; use flowy_test::{event_builder::*, FlowySDKTest};
use std::{sync::Arc, time::Duration}; use std::{sync::Arc, time::Duration};
use tokio::time::sleep; use tokio::time::sleep;
@ -259,13 +259,12 @@ pub async fn create_workspace(sdk: &FlowySDKTest, name: &str, desc: &str) -> Wor
desc: desc.to_owned(), desc: desc.to_owned(),
}; };
let workspace = FolderEventBuilder::new(sdk.clone()) FolderEventBuilder::new(sdk.clone())
.event(CreateWorkspace) .event(CreateWorkspace)
.payload(request) .payload(request)
.async_send() .async_send()
.await .await
.parse::<WorkspacePB>(); .parse::<WorkspacePB>()
workspace
} }
pub async fn read_workspace(sdk: &FlowySDKTest, workspace_id: Option<String>) -> Vec<WorkspacePB> { pub async fn read_workspace(sdk: &FlowySDKTest, workspace_id: Option<String>) -> Vec<WorkspacePB> {
@ -300,13 +299,12 @@ pub async fn create_app(sdk: &FlowySDKTest, workspace_id: &str, name: &str, desc
color_style: Default::default(), color_style: Default::default(),
}; };
let app = FolderEventBuilder::new(sdk.clone()) FolderEventBuilder::new(sdk.clone())
.event(CreateApp) .event(CreateApp)
.payload(create_app_request) .payload(create_app_request)
.async_send() .async_send()
.await .await
.parse::<AppPB>(); .parse::<AppPB>()
app
} }
pub async fn read_app(sdk: &FlowySDKTest, app_id: &str) -> AppPB { pub async fn read_app(sdk: &FlowySDKTest, app_id: &str) -> AppPB {
@ -314,14 +312,12 @@ pub async fn read_app(sdk: &FlowySDKTest, app_id: &str) -> AppPB {
value: app_id.to_owned(), value: app_id.to_owned(),
}; };
let app = FolderEventBuilder::new(sdk.clone()) FolderEventBuilder::new(sdk.clone())
.event(ReadApp) .event(ReadApp)
.payload(request) .payload(request)
.async_send() .async_send()
.await .await
.parse::<AppPB>(); .parse::<AppPB>()
app
} }
pub async fn update_app(sdk: &FlowySDKTest, app_id: &str, name: Option<String>, desc: Option<String>) { pub async fn update_app(sdk: &FlowySDKTest, app_id: &str, name: Option<String>, desc: Option<String>) {
@ -369,13 +365,12 @@ pub async fn create_view(
layout, layout,
view_content_data: vec![], view_content_data: vec![],
}; };
let view = FolderEventBuilder::new(sdk.clone()) FolderEventBuilder::new(sdk.clone())
.event(CreateView) .event(CreateView)
.payload(request) .payload(request)
.async_send() .async_send()
.await .await
.parse::<ViewPB>(); .parse::<ViewPB>()
view
} }
pub async fn read_view(sdk: &FlowySDKTest, view_id: &str) -> ViewPB { pub async fn read_view(sdk: &FlowySDKTest, view_id: &str) -> ViewPB {

View File

@ -9,6 +9,7 @@ edition = "2021"
lib-dispatch = { path = "../lib-dispatch" } lib-dispatch = { path = "../lib-dispatch" }
dart-notify = { path = "../dart-notify" } dart-notify = { path = "../dart-notify" }
flowy-revision = { path = "../flowy-revision" } flowy-revision = { path = "../flowy-revision" }
flowy-revision-persistence = { path = "../flowy-revision-persistence" }
flowy-task= { path = "../flowy-task" } flowy-task= { path = "../flowy-task" }
flowy-error = { path = "../flowy-error", features = ["db"]} flowy-error = { path = "../flowy-error", features = ["db"]}
flowy-derive = { path = "../flowy-derive" } flowy-derive = { path = "../flowy-derive" }
@ -17,7 +18,7 @@ lib-infra = { path = "../../../shared-lib/lib-infra" }
grid-rev-model = { path = "../../../shared-lib/grid-rev-model" } grid-rev-model = { path = "../../../shared-lib/grid-rev-model" }
flowy-sync = { path = "../flowy-sync"} flowy-sync = { path = "../flowy-sync"}
flowy-http-model = { path = "../../../shared-lib/flowy-http-model" } flowy-http-model = { path = "../../../shared-lib/flowy-http-model" }
flowy-database = { path = "../flowy-database" } flowy-database = { path = "../flowy-database", optional = true }
anyhow = "1.0" anyhow = "1.0"
strum = "0.21" strum = "0.21"
@ -56,5 +57,6 @@ flowy-codegen = { path = "../flowy-codegen"}
[features] [features]
default = [] default = []
rev-sqlite = ["flowy-database"]
dart = ["flowy-codegen/dart", "dart-notify/dart"] dart = ["flowy-codegen/dart", "dart-notify/dart"]
flowy_unit_test = ["flowy-revision/flowy_unit_test"] flowy_unit_test = ["flowy-revision/flowy_unit_test"]

View File

@ -182,7 +182,7 @@ impl CellDataChangeset for NumberTypeOptionPB {
let number_cell_data = self.format_cell_data(&data)?; let number_cell_data = self.format_cell_data(&data)?;
match self.format { match self.format {
NumberFormat::Num => Ok((number_cell_data.to_string().into(), number_cell_data.to_string().into())), NumberFormat::Num => Ok((number_cell_data.to_string(), number_cell_data.to_string().into())),
_ => Ok((data, number_cell_data.to_string().into())), _ => Ok((data, number_cell_data.to_string().into())),
} }
} }

View File

@ -9,7 +9,7 @@ use flowy_database::{
use flowy_error::{internal_error, FlowyError, FlowyResult}; use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange}; use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_http_model::util::md5; use flowy_http_model::util::md5;
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord}; use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::sync::Arc; use std::sync::Arc;
pub struct SQLiteGridBlockRevisionPersistence { pub struct SQLiteGridBlockRevisionPersistence {
@ -22,7 +22,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> { fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
GridMetaRevisionSql::create(revision_records, &*conn)?; GridMetaRevisionSql::create(revision_records, &conn)?;
Ok(()) Ok(())
} }
@ -54,7 +54,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
let conn = &*self.pool.get().map_err(internal_error)?; let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets { for changeset in changesets {
let _ = GridMetaRevisionSql::update(changeset, conn)?; GridMetaRevisionSql::update(changeset, conn)?;
} }
Ok(()) Ok(())
})?; })?;
@ -75,8 +75,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
GridMetaRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?; GridMetaRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
GridMetaRevisionSql::create(inserted_records, &*conn)?; GridMetaRevisionSql::create(inserted_records, &conn)?;
Ok(()) Ok(())
}) })
} }

View File

@ -1,3 +1,4 @@
#![allow(clippy::unused_unit)]
use bytes::Bytes; use bytes::Bytes;
use flowy_database::{ use flowy_database::{
prelude::*, prelude::*,

View File

@ -9,7 +9,7 @@ use flowy_database::{
use flowy_error::{internal_error, FlowyError, FlowyResult}; use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange}; use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_http_model::util::md5; use flowy_http_model::util::md5;
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord}; use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::sync::Arc; use std::sync::Arc;
pub struct SQLiteGridRevisionPersistence { pub struct SQLiteGridRevisionPersistence {
@ -22,7 +22,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> { fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
GridRevisionSql::create(revision_records, &*conn)?; GridRevisionSql::create(revision_records, &conn)?;
Ok(()) Ok(())
} }
@ -54,7 +54,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
let conn = &*self.pool.get().map_err(internal_error)?; let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets { for changeset in changesets {
let _ = GridRevisionSql::update(changeset, conn)?; GridRevisionSql::update(changeset, conn)?;
} }
Ok(()) Ok(())
})?; })?;
@ -75,8 +75,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
GridRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?; GridRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
GridRevisionSql::create(inserted_records, &*conn)?; GridRevisionSql::create(inserted_records, &conn)?;
Ok(()) Ok(())
}) })
} }

View File

@ -9,7 +9,7 @@ use flowy_database::{
use flowy_error::{internal_error, FlowyError, FlowyResult}; use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange}; use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_http_model::util::md5; use flowy_http_model::util::md5;
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord}; use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::sync::Arc; use std::sync::Arc;
pub struct SQLiteGridViewRevisionPersistence { pub struct SQLiteGridViewRevisionPersistence {
@ -31,7 +31,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> { fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
GridViewRevisionSql::create(revision_records, &*conn)?; GridViewRevisionSql::create(revision_records, &conn)?;
Ok(()) Ok(())
} }
@ -63,7 +63,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
let conn = &*self.pool.get().map_err(internal_error)?; let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets { for changeset in changesets {
let _ = GridViewRevisionSql::update(changeset, conn)?; GridViewRevisionSql::update(changeset, conn)?;
} }
Ok(()) Ok(())
})?; })?;
@ -84,8 +84,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?; let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| { conn.immediate_transaction::<_, FlowyError, _>(|| {
GridViewRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?; GridViewRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
GridViewRevisionSql::create(inserted_records, &*conn)?; GridViewRevisionSql::create(inserted_records, &conn)?;
Ok(()) Ok(())
}) })
} }

View File

@ -94,9 +94,8 @@ async fn url_cell_date_test() {
for (i, cell) in cells.iter().enumerate() { for (i, cell) in cells.iter().enumerate() {
let url_cell_data = cell.get_url_field_cell_data().unwrap(); let url_cell_data = cell.get_url_field_cell_data().unwrap();
match i { if i == 0 {
0 => assert_eq!(url_cell_data.url.as_str(), "https://www.appflowy.io/"), assert_eq!(url_cell_data.url.as_str(), "https://www.appflowy.io/")
_ => {}
} }
} }
} }

View File

@ -47,9 +47,7 @@ impl GridSnapshotTest {
} }
pub async fn grid_pad(&self) -> GridRevisionPad { pub async fn grid_pad(&self) -> GridRevisionPad {
let pad = self.editor.grid_pad(); self.editor.grid_pad().read().await.clone()
let pad = (*pad.read().await).clone();
pad
} }
pub async fn run_scripts(&mut self, scripts: Vec<SnapshotScript>) { pub async fn run_scripts(&mut self, scripts: Vec<SnapshotScript>) {

View File

@ -48,7 +48,8 @@ impl LocalServer {
} }
pub async fn stop(&self) { pub async fn stop(&self) {
if let Some(stop_tx) = self.stop_tx.read().clone() { let sender = self.stop_tx.read().clone();
if let Some(stop_tx) = sender {
let _ = stop_tx.send(()).await; let _ = stop_tx.send(()).await;
} }
} }

View File

@ -0,0 +1,13 @@
[package]
name = "flowy-revision-persistence"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
flowy-error = { path = "../flowy-error" }
flowy-http-model = { path = "../../../shared-lib/flowy-http-model" }
[features]
rev-file = []

View File

@ -0,0 +1,54 @@
use crate::{RevisionChangeset, RevisionDiskCache, SyncRecord};
use flowy_error::FlowyResult;
use flowy_http_model::revision::RevisionRange;
pub struct FileRevisionDiskCache {
path: String,
}
pub type FileRevisionDiskCacheConnection = ();
impl RevisionDiskCache<FileRevisionDiskCacheConnection> for FileRevisionDiskCache {
type Error = ();
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
Ok(())
}
fn get_connection(&self) -> Result<FileRevisionDiskCacheConnection, Self::Error> {
return Ok(());
}
fn read_revision_records(
&self,
object_id: &str,
rev_ids: Option<Vec<i64>>,
) -> Result<Vec<SyncRecord>, Self::Error> {
Ok(vec![])
}
fn read_revision_records_with_range(
&self,
object_id: &str,
range: &RevisionRange,
) -> Result<Vec<SyncRecord>, Self::Error> {
Ok(vec![])
}
fn update_revision_record(&self, changesets: Vec<RevisionChangeset>) -> FlowyResult<()> {
Ok(())
}
fn delete_revision_records(&self, object_id: &str, rev_ids: Option<Vec<i64>>) -> Result<(), Self::Error> {
Ok(())
}
fn delete_and_insert_records(
&self,
object_id: &str,
deleted_rev_ids: Option<Vec<i64>>,
inserted_records: Vec<SyncRecord>,
) -> Result<(), Self::Error> {
todo!()
}
}

View File

@ -0,0 +1,2 @@
#[cfg(feature = "rev-file")]
pub mod file_persistence;

View File

@ -1,3 +1,5 @@
mod disk_cache_impl;
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange}; use flowy_http_model::revision::{Revision, RevisionRange};
use std::fmt::Debug; use std::fmt::Debug;

View File

@ -10,6 +10,7 @@ flowy-http-model = { path = "../../../shared-lib/flowy-http-model" }
lib-ws = { path = "../../../shared-lib/lib-ws" } lib-ws = { path = "../../../shared-lib/lib-ws" }
lib-infra = { path = "../../../shared-lib/lib-infra" } lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-error = { path = "../flowy-error" } flowy-error = { path = "../flowy-error" }
flowy-revision-persistence= { path = "../flowy-revision-persistence" }
tracing = { version = "0.1", features = ["log"] } tracing = { version = "0.1", features = ["log"] }
tokio = {version = "1", features = ["sync"]} tokio = {version = "1", features = ["sync"]}
bytes = { version = "1.1" } bytes = { version = "1.1" }

View File

@ -1,8 +1,8 @@
use crate::disk::SyncRecord;
use crate::REVISION_WRITE_INTERVAL_IN_MILLIS; use crate::REVISION_WRITE_INTERVAL_IN_MILLIS;
use dashmap::DashMap; use dashmap::DashMap;
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use flowy_http_model::revision::RevisionRange; use flowy_http_model::revision::RevisionRange;
use flowy_revision_persistence::SyncRecord;
use std::{borrow::Cow, sync::Arc, time::Duration}; use std::{borrow::Cow, sync::Arc, time::Duration};
use tokio::{sync::RwLock, task::JoinHandle}; use tokio::{sync::RwLock, task::JoinHandle};

View File

@ -1,3 +1,2 @@
pub mod disk;
pub(crate) mod memory; pub(crate) mod memory;
pub mod reset; pub mod reset;

View File

@ -1,8 +1,8 @@
use crate::disk::{RevisionDiskCache, SyncRecord};
use crate::{RevisionLoader, RevisionPersistence, RevisionPersistenceConfiguration}; use crate::{RevisionLoader, RevisionPersistence, RevisionPersistenceConfiguration};
use bytes::Bytes; use bytes::Bytes;
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use flowy_http_model::revision::Revision; use flowy_http_model::revision::Revision;
use flowy_revision_persistence::{RevisionDiskCache, SyncRecord};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::str::FromStr; use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;

View File

@ -286,7 +286,7 @@ impl<Connection: 'static> RevisionManager<Connection> {
pub fn ack_notify(&self) -> tokio::sync::broadcast::Receiver<i64> { pub fn ack_notify(&self) -> tokio::sync::broadcast::Receiver<i64> {
self.rev_ack_notifier.subscribe() self.rev_ack_notifier.subscribe()
} }
pub fn get_all_revision_records(&self) -> FlowyResult<Vec<crate::disk::SyncRecord>> { pub fn get_all_revision_records(&self) -> FlowyResult<Vec<flowy_revision_persistence::SyncRecord>> {
self.rev_persistence.load_all_records(&self.object_id) self.rev_persistence.load_all_records(&self.object_id)
} }
} }

View File

@ -1,12 +1,9 @@
use crate::cache::{ use crate::cache::memory::RevisionMemoryCacheDelegate;
disk::{RevisionChangeset, RevisionDiskCache},
memory::RevisionMemoryCacheDelegate,
};
use crate::disk::{RevisionState, SyncRecord};
use crate::memory::RevisionMemoryCache; use crate::memory::RevisionMemoryCache;
use crate::RevisionMergeable; use crate::RevisionMergeable;
use flowy_error::{internal_error, FlowyError, FlowyResult}; use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange}; use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::collections::{HashMap, VecDeque}; use std::collections::{HashMap, VecDeque};
use std::{borrow::Cow, sync::Arc}; use std::{borrow::Cow, sync::Arc};

View File

@ -1,10 +1,10 @@
use bytes::Bytes; use bytes::Bytes;
use flowy_error::{internal_error, FlowyError, FlowyResult}; use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, SyncRecord};
use flowy_revision::{ use flowy_revision::{
RevisionManager, RevisionMergeable, RevisionObjectDeserializer, RevisionPersistence, RevisionManager, RevisionMergeable, RevisionObjectDeserializer, RevisionPersistence,
RevisionPersistenceConfiguration, RevisionSnapshot, RevisionSnapshotDiskCache, REVISION_WRITE_INTERVAL_IN_MILLIS, RevisionPersistenceConfiguration, RevisionSnapshot, RevisionSnapshotDiskCache, REVISION_WRITE_INTERVAL_IN_MILLIS,
}; };
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, SyncRecord};
use flowy_http_model::revision::{Revision, RevisionRange}; use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_http_model::util::md5; use flowy_http_model::util::md5;

View File

@ -70,13 +70,12 @@ async fn create_workspace(sdk: &FlowySDKTest, name: &str, desc: &str) -> Workspa
desc: desc.to_owned(), desc: desc.to_owned(),
}; };
let workspace = FolderEventBuilder::new(sdk.clone()) FolderEventBuilder::new(sdk.clone())
.event(CreateWorkspace) .event(CreateWorkspace)
.payload(request) .payload(request)
.async_send() .async_send()
.await .await
.parse::<WorkspacePB>(); .parse::<WorkspacePB>()
workspace
} }
async fn open_workspace(sdk: &FlowySDKTest, workspace_id: &str) { async fn open_workspace(sdk: &FlowySDKTest, workspace_id: &str) {
@ -98,13 +97,12 @@ async fn create_app(sdk: &FlowySDKTest, name: &str, desc: &str, workspace_id: &s
color_style: Default::default(), color_style: Default::default(),
}; };
let app = FolderEventBuilder::new(sdk.clone()) FolderEventBuilder::new(sdk.clone())
.event(CreateApp) .event(CreateApp)
.payload(create_app_request) .payload(create_app_request)
.async_send() .async_send()
.await .await
.parse::<AppPB>(); .parse::<AppPB>()
app
} }
async fn create_view( async fn create_view(
@ -124,13 +122,12 @@ async fn create_view(
view_content_data: data, view_content_data: data,
}; };
let view = FolderEventBuilder::new(sdk.clone()) FolderEventBuilder::new(sdk.clone())
.event(CreateView) .event(CreateView)
.payload(request) .payload(request)
.async_send() .async_send()
.await .await
.parse::<ViewPB>(); .parse::<ViewPB>()
view
} }
pub fn root_dir() -> String { pub fn root_dir() -> String {

View File

@ -45,8 +45,7 @@ impl FlowySDKTest {
} }
pub async fn sign_up(&self) -> SignUpContext { pub async fn sign_up(&self) -> SignUpContext {
let context = async_sign_up(self.inner.dispatcher()).await; async_sign_up(self.inner.dispatcher()).await
context
} }
pub async fn init_user(&self) -> UserProfilePB { pub async fn init_user(&self) -> UserProfilePB {

View File

@ -7,7 +7,7 @@ edition = "2018"
[dependencies] [dependencies]
flowy-derive = { path = "../flowy-derive" } flowy-derive = { path = "../flowy-derive" }
flowy-database = { path = "../flowy-database" } flowy-database = { path = "../flowy-database", optional = true }
flowy-error = { path = "../flowy-error", features = ["db", "http_server"] } flowy-error = { path = "../flowy-error", features = ["db", "http_server"] }
lib-infra = { path = "../../../shared-lib/lib-infra" } lib-infra = { path = "../../../shared-lib/lib-infra" }
@ -45,6 +45,7 @@ rand_core = "0.6.3"
rand = "0.8.5" rand = "0.8.5"
[features] [features]
rev-sqlite = ["flowy-database"]
dart = ["flowy-codegen/dart", "dart-notify/dart"] dart = ["flowy-codegen/dart", "dart-notify/dart"]
[build-dependencies] [build-dependencies]

View File

@ -1,21 +0,0 @@
[package]
name = "lib-sqlite"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
r2d2 = "0.8.9"
libsqlite3-sys = { version = ">=0.8.0, <0.24.0", features = ["bundled"] }
diesel = { version = "1.4.8", features = ["sqlite"] }
diesel_derives = { version = "1.4.1", features = ["sqlite"] }
diesel_migrations = { version = "1.4.0", features = ["sqlite"] }
lazy_static = "1.4.0"
scheduled-thread-pool = "0.2.5"
error-chain = "=0.12.0"
log = "0.4.11"
openssl = { version = "0.10.38", optional = true }
[features]
openssl_vendored = ["openssl/vendored"]

View File

@ -67,8 +67,8 @@ script = [
""" """
cd rust-lib/ cd rust-lib/
rustup show rustup show
echo cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FEATURES}" echo cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FEATURES}" cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cd ../ cd ../
""", """,
] ]
@ -105,7 +105,7 @@ private = true
script = [ script = [
""" """
cd rust-lib cd rust-lib
exec cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FEATURES}" exec cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cd .. cd ..
""", """,
] ]
@ -128,7 +128,7 @@ run_task = { name = [
script = [ script = [
""" """
cd rust-lib/ cd rust-lib/
cargo build --${BUILD_FLAG} --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FEATURES}" cargo build --${BUILD_FLAG} --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cd ../ cd ../
""", """,
] ]
@ -216,8 +216,8 @@ script = [
""" """
cd rust-lib/ cd rust-lib/
rustup show rustup show
echo cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FEATURES}" echo cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
RUST_LOG=${RUST_LOG} cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FEATURES}" RUST_LOG=${RUST_LOG} cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cd ../ cd ../
""", """,
] ]
@ -229,8 +229,8 @@ script = [
""" """
cd rust-lib/ cd rust-lib/
rustup show rustup show
echo cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FEATURES}" echo cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FEATURES}" cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cd ../ cd ../
""", """,
] ]

View File

@ -32,7 +32,7 @@ run_task = { name = ["rust_lib_unit_test", "shared_lib_unit_test"] }
description = "Run rust-lib unit tests" description = "Run rust-lib unit tests"
script = ''' script = '''
cd rust-lib cd rust-lib
cargo test --no-default-features --features="sync" cargo test --no-default-features --features="sync, rev-sqlite"
''' '''
[tasks.shared_lib_unit_test] [tasks.shared_lib_unit_test]

View File

@ -67,9 +67,9 @@ impl Revision {
impl std::fmt::Debug for Revision { impl std::fmt::Debug for Revision {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
let _ = f.write_fmt(format_args!("object_id {}, ", self.object_id))?; f.write_fmt(format_args!("object_id {}, ", self.object_id))?;
let _ = f.write_fmt(format_args!("base_rev_id {}, ", self.base_rev_id))?; f.write_fmt(format_args!("base_rev_id {}, ", self.base_rev_id))?;
let _ = f.write_fmt(format_args!("rev_id {}, ", self.rev_id))?; f.write_fmt(format_args!("rev_id {}, ", self.rev_id))?;
Ok(()) Ok(())
} }
} }

View File

@ -44,7 +44,7 @@ impl ClientRevisionWSData {
object_id: object_id.to_owned(), object_id: object_id.to_owned(),
ty: ClientRevisionWSDataType::ClientPing, ty: ClientRevisionWSDataType::ClientPing,
revisions: vec![], revisions: vec![],
rev_id: rev_id, rev_id,
} }
} }
} }
@ -61,7 +61,7 @@ impl std::convert::TryFrom<ClientRevisionWSData> for Bytes {
type Error = serde_json::Error; type Error = serde_json::Error;
fn try_from(bytes: ClientRevisionWSData) -> Result<Self, Self::Error> { fn try_from(bytes: ClientRevisionWSData) -> Result<Self, Self::Error> {
serde_json::to_vec(&bytes).map(|bytes| Bytes::from(bytes)) serde_json::to_vec(&bytes).map(Bytes::from)
} }
} }
@ -91,7 +91,7 @@ impl std::convert::TryFrom<ServerRevisionWSData> for Bytes {
type Error = serde_json::Error; type Error = serde_json::Error;
fn try_from(bytes: ServerRevisionWSData) -> Result<Self, Self::Error> { fn try_from(bytes: ServerRevisionWSData) -> Result<Self, Self::Error> {
serde_json::to_vec(&bytes).map(|bytes| Bytes::from(bytes)) serde_json::to_vec(&bytes).map(Bytes::from)
} }
} }

View File

@ -124,7 +124,7 @@ impl AttributeHashMap {
impl Display for AttributeHashMap { impl Display for AttributeHashMap {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for (key, value) in self.0.iter() { for (key, value) in self.0.iter() {
let _ = f.write_str(&format!("{:?}:{:?}", key, value))?; f.write_str(&format!("{:?}:{:?}", key, value))?;
} }
Ok(()) Ok(())
} }

View File

@ -206,7 +206,7 @@ pub struct OpMetric();
impl Metric for OpMetric { impl Metric for OpMetric {
fn seek<T: OperationAttributes>(cursor: &mut OperationsCursor<T>, op_offset: usize) -> SeekResult { fn seek<T: OperationAttributes>(cursor: &mut OperationsCursor<T>, op_offset: usize) -> SeekResult {
let _ = check_bound(cursor.op_offset, op_offset)?; check_bound(cursor.op_offset, op_offset)?;
let mut seek_cursor = OperationsCursor::new(cursor.delta, cursor.origin_iv); let mut seek_cursor = OperationsCursor::new(cursor.delta, cursor.origin_iv);
while let Some((_, op)) = seek_cursor.iter.next() { while let Some((_, op)) = seek_cursor.iter.next() {
@ -226,7 +226,7 @@ pub struct Utf16CodeUnitMetric();
impl Metric for Utf16CodeUnitMetric { impl Metric for Utf16CodeUnitMetric {
fn seek<T: OperationAttributes>(cursor: &mut OperationsCursor<T>, offset: usize) -> SeekResult { fn seek<T: OperationAttributes>(cursor: &mut OperationsCursor<T>, offset: usize) -> SeekResult {
if offset > 0 { if offset > 0 {
let _ = check_bound(cursor.consume_count, offset)?; check_bound(cursor.consume_count, offset)?;
let _ = cursor.next_with_len(Some(offset)); let _ = cursor.next_with_len(Some(offset));
} }

View File

@ -113,9 +113,9 @@ where
{ {
let len = false as usize + 1 + if self.attributes.is_empty() { 0 } else { 1 }; let len = false as usize + 1 + if self.attributes.is_empty() { 0 } else { 1 };
let mut serde_state = serializer.serialize_struct("Retain", len)?; let mut serde_state = serializer.serialize_struct("Retain", len)?;
let _ = serde::ser::SerializeStruct::serialize_field(&mut serde_state, "retain", &self.n)?; serde::ser::SerializeStruct::serialize_field(&mut serde_state, "retain", &self.n)?;
if !self.attributes.is_empty() { if !self.attributes.is_empty() {
let _ = serde::ser::SerializeStruct::serialize_field(&mut serde_state, "attributes", &self.attributes)?; serde::ser::SerializeStruct::serialize_field(&mut serde_state, "attributes", &self.attributes)?;
} }
serde::ser::SerializeStruct::end(serde_state) serde::ser::SerializeStruct::end(serde_state)
} }
@ -216,9 +216,9 @@ where
{ {
let len = false as usize + 1 + if self.attributes.is_empty() { 0 } else { 1 }; let len = false as usize + 1 + if self.attributes.is_empty() { 0 } else { 1 };
let mut serde_state = serializer.serialize_struct("Insert", len)?; let mut serde_state = serializer.serialize_struct("Insert", len)?;
let _ = serde::ser::SerializeStruct::serialize_field(&mut serde_state, "insert", &self.s)?; serde::ser::SerializeStruct::serialize_field(&mut serde_state, "insert", &self.s)?;
if !self.attributes.is_empty() { if !self.attributes.is_empty() {
let _ = serde::ser::SerializeStruct::serialize_field(&mut serde_state, "attributes", &self.attributes)?; serde::ser::SerializeStruct::serialize_field(&mut serde_state, "attributes", &self.attributes)?;
} }
serde::ser::SerializeStruct::end(serde_state) serde::ser::SerializeStruct::end(serde_state)
} }

View File

@ -12,7 +12,7 @@ impl Serialize for NodeOperations {
let operations = self.values(); let operations = self.values();
let mut seq = serializer.serialize_seq(Some(operations.len()))?; let mut seq = serializer.serialize_seq(Some(operations.len()))?;
for operation in operations { for operation in operations {
let _ = seq.serialize_element(&operation)?; seq.serialize_element(&operation)?;
} }
seq.end() seq.end()
} }

View File

@ -249,18 +249,7 @@ impl TransactionBuilder {
} }
fn get_deleted_node_data(&self, node_tree: &NodeTree, node_id: NodeId) -> NodeData { fn get_deleted_node_data(&self, node_tree: &NodeTree, node_id: NodeId) -> NodeData {
let node_data = node_tree.get_node(node_id).unwrap(); recursive_get_deleted_node_data(node_tree, node_id)
let mut children = vec![];
node_tree.get_children_ids(node_id).into_iter().for_each(|child_id| {
children.push(self.get_deleted_node_data(node_tree, child_id));
});
NodeData {
node_type: node_data.node_type.clone(),
attributes: node_data.attributes.clone(),
body: node_data.body.clone(),
children,
}
} }
pub fn push(mut self, op: NodeOperation) -> Self { pub fn push(mut self, op: NodeOperation) -> Self {
@ -272,3 +261,19 @@ impl TransactionBuilder {
Transaction::from_operations(self.operations) Transaction::from_operations(self.operations)
} }
} }
fn recursive_get_deleted_node_data(node_tree: &NodeTree, node_id: NodeId) -> NodeData {
let node_data = node_tree.get_node(node_id).unwrap();
let mut children = vec![];
node_tree.get_children_ids(node_id).into_iter().for_each(|child_id| {
let child = recursive_get_deleted_node_data(node_tree, child_id);
children.push(child);
});
NodeData {
node_type: node_data.node_type.clone(),
attributes: node_data.attributes.clone(),
body: node_data.body.clone(),
children,
}
}

View File

@ -31,7 +31,7 @@ impl NodeTree {
pub fn from_node_data(node_data: NodeData, context: NodeTreeContext) -> Result<Self, OTError> { pub fn from_node_data(node_data: NodeData, context: NodeTreeContext) -> Result<Self, OTError> {
let mut tree = Self::new(context); let mut tree = Self::new(context);
let _ = tree.insert_nodes(&0_usize.into(), vec![node_data])?; tree.insert_nodes(&0_usize.into(), vec![node_data])?;
Ok(tree) Ok(tree)
} }
@ -68,7 +68,7 @@ impl NodeTree {
let operations = operations.into(); let operations = operations.into();
let mut node_tree = NodeTree::new(context); let mut node_tree = NodeTree::new(context);
for (_, operation) in operations.into_inner().into_iter().enumerate() { for (_, operation) in operations.into_inner().into_iter().enumerate() {
let _ = node_tree.apply_op(operation)?; node_tree.apply_op(operation)?;
} }
Ok(node_tree) Ok(node_tree)
} }
@ -76,7 +76,7 @@ impl NodeTree {
pub fn from_transaction<T: Into<Transaction>>(transaction: T, context: NodeTreeContext) -> Result<Self, OTError> { pub fn from_transaction<T: Into<Transaction>>(transaction: T, context: NodeTreeContext) -> Result<Self, OTError> {
let transaction = transaction.into(); let transaction = transaction.into();
let mut tree = Self::new(context); let mut tree = Self::new(context);
let _ = tree.apply_transaction(transaction)?; tree.apply_transaction(transaction)?;
Ok(tree) Ok(tree)
} }
@ -473,7 +473,7 @@ impl NodeTree {
None => tracing::warn!("The path: {:?} does not contain any nodes", path), None => tracing::warn!("The path: {:?} does not contain any nodes", path),
Some(node) => { Some(node) => {
let node = node.get_mut(); let node = node.get_mut();
let _ = f(node)?; f(node)?;
} }
} }
Ok(()) Ok(())

View File

@ -26,7 +26,7 @@ impl Serialize for NodeTree {
let mut seq = serializer.serialize_seq(Some(children.len()))?; let mut seq = serializer.serialize_seq(Some(children.len()))?;
for child in children { for child in children {
if let Some(child_node_data) = self.get_node_data(child) { if let Some(child_node_data) = self.get_node_data(child) {
let _ = seq.serialize_element(&child_node_data)?; seq.serialize_element(&child_node_data)?;
} }
} }
seq.end() seq.end()

View File

@ -4,7 +4,7 @@ macro_rules! inline_attribute_entry {
$key: ident, $key: ident,
$value: ty $value: ty
) => { ) => {
pub fn $key(value: $value) -> crate::core::AttributeEntry { pub fn $key(value: $value) -> $crate::core::AttributeEntry {
AttributeEntry { AttributeEntry {
key: BuildInTextAttributeKey::$key.as_ref().to_string(), key: BuildInTextAttributeKey::$key.as_ref().to_string(),
value: value.into(), value: value.into(),
@ -19,7 +19,7 @@ macro_rules! inline_list_attribute_entry {
$key: ident, $key: ident,
$value: expr $value: expr
) => { ) => {
pub fn $key(b: bool) -> crate::core::AttributeEntry { pub fn $key(b: bool) -> $crate::core::AttributeEntry {
let value = match b { let value = match b {
true => $value, true => $value,
false => "", false => "",

View File

@ -10,7 +10,7 @@ fn transaction_compose_update_after_insert_test() {
// Modify the same path, the operations will be merged after composing if possible. // Modify the same path, the operations will be merged after composing if possible.
let mut transaction_a = TransactionBuilder::new().insert_node_at_path(0, node_data).build(); let mut transaction_a = TransactionBuilder::new().insert_node_at_path(0, node_data).build();
let transaction_b = TransactionBuilder::new().update_node_at_path(0, changeset).build(); let transaction_b = TransactionBuilder::new().update_node_at_path(0, changeset).build();
let _ = transaction_a.compose(transaction_b).unwrap(); transaction_a.compose(transaction_b).unwrap();
// The operations are merged into one operation // The operations are merged into one operation
assert_eq!(transaction_a.operations.len(), 1); assert_eq!(transaction_a.operations.len(), 1);
@ -46,14 +46,14 @@ fn transaction_compose_multiple_update_test() {
let inverted = Transaction::from_operations(other_transaction.operations.inverted()); let inverted = Transaction::from_operations(other_transaction.operations.inverted());
// the update operation will be merged into insert operation // the update operation will be merged into insert operation
let _ = transaction.compose(other_transaction).unwrap(); transaction.compose(other_transaction).unwrap();
assert_eq!(transaction.operations.len(), 1); assert_eq!(transaction.operations.len(), 1);
assert_eq!( assert_eq!(
transaction.to_json().unwrap(), transaction.to_json().unwrap(),
r#"{"operations":[{"op":"insert","path":[0],"nodes":[{"type":"text","body":{"delta":[{"insert":"Hello world😁"}]}}]}]}"# r#"{"operations":[{"op":"insert","path":[0],"nodes":[{"type":"text","body":{"delta":[{"insert":"Hello world😁"}]}}]}]}"#
); );
let _ = transaction.compose(inverted).unwrap(); transaction.compose(inverted).unwrap();
assert_eq!( assert_eq!(
transaction.to_json().unwrap(), transaction.to_json().unwrap(),
r#"{"operations":[{"op":"insert","path":[0],"nodes":[{"type":"text","body":{"delta":[{"insert":"Hello"}]}}]}]}"# r#"{"operations":[{"op":"insert","path":[0],"nodes":[{"type":"text","body":{"delta":[{"insert":"Hello"}]}}]}]}"#

View File

@ -232,8 +232,7 @@ pub struct WSSender(MsgSender);
impl WSSender { impl WSSender {
pub fn send_msg<T: Into<WebSocketRawMessage>>(&self, msg: T) -> Result<(), WSError> { pub fn send_msg<T: Into<WebSocketRawMessage>>(&self, msg: T) -> Result<(), WSError> {
let msg = msg.into(); let msg = msg.into();
let _ = self self.0
.0
.unbounded_send(msg.into()) .unbounded_send(msg.into())
.map_err(|e| WSError::internal().context(e))?; .map_err(|e| WSError::internal().context(e))?;
Ok(()) Ok(())
@ -261,7 +260,7 @@ impl WSSender {
reason: reason.to_owned().into(), reason: reason.to_owned().into(),
}; };
let msg = Message::Close(Some(frame)); let msg = Message::Close(Some(frame));
let _ = self.0.unbounded_send(msg).map_err(|e| WSError::internal().context(e))?; self.0.unbounded_send(msg).map_err(|e| WSError::internal().context(e))?;
Ok(()) Ok(())
} }
} }