chore: add sqlite feature flag (#1683)

* chore: add sqlite feature flag

* chore: fix clippy warnings

* ci: fix clippy

* chore: add rev file persistence

* ci: fix clippy

Co-authored-by: nathan <nathan@appflowy.io>
This commit is contained in:
Nathan.fooo 2023-01-12 13:09:08 +08:00 committed by GitHub
parent 860c5d100b
commit 6a36bcd31d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
74 changed files with 281 additions and 226 deletions

View File

@ -66,19 +66,19 @@ jobs:
working-directory: frontend/rust-lib/
- name: clippy rust-lib
run: cargo clippy --no-default-features
run: cargo clippy --features="rev-sqlite"
working-directory: frontend/rust-lib
- name: Run rust-lib tests
working-directory: frontend/rust-lib
run: RUST_LOG=info cargo test --no-default-features --features="sync"
run: RUST_LOG=info cargo test --no-default-features --features="sync,rev-sqlite"
- name: rustfmt shared-lib
run: cargo fmt --all -- --check
working-directory: shared-lib
- name: clippy shared-lib
run: cargo clippy --no-default-features
run: cargo clippy -- -D warnings
working-directory: shared-lib
- name: Run shared-lib tests

View File

@ -23,7 +23,7 @@ CARGO_MAKE_CRATE_FS_NAME = "dart_ffi"
CARGO_MAKE_CRATE_NAME = "dart-ffi"
LIB_NAME = "dart_ffi"
CURRENT_APP_VERSION = "0.0.9.1"
FEATURES = "flutter"
FLUTTER_DESKTOP_FEATURES = "flutter,rev-sqlite"
PRODUCT_NAME = "AppFlowy"
# CRATE_TYPE: https://doc.rust-lang.org/reference/linkage.html
# If you update the macOS's CRATE_TYPE, don't forget to update the
@ -150,7 +150,7 @@ script = ['''
echo BUILD_FLAG: ${BUILD_FLAG}
echo TARGET_OS: ${TARGET_OS}
echo RUST_COMPILE_TARGET: ${RUST_COMPILE_TARGET}
echo FEATURES: ${FEATURES}
echo FEATURES: ${FLUTTER_DESKTOP_FEATURES}
echo PRODUCT_EXT: ${PRODUCT_EXT}
echo APP_ENVIRONMENT: ${APP_ENVIRONMENT}
echo ${platforms}

View File

@ -884,8 +884,12 @@ dependencies = [
"diesel",
"diesel_derives",
"diesel_migrations",
"error-chain",
"lazy_static",
"lib-sqlite",
"libsqlite3-sys",
"openssl",
"r2d2",
"scheduled-thread-pool",
"tracing",
]
@ -928,6 +932,7 @@ dependencies = [
"flowy-error",
"flowy-http-model",
"flowy-revision",
"flowy-revision-persistence",
"flowy-sync",
"flowy-test",
"futures",
@ -964,7 +969,6 @@ dependencies = [
"http-flowy",
"lib-dispatch",
"lib-ot",
"lib-sqlite",
"protobuf",
"r2d2",
"serde_json",
@ -987,6 +991,7 @@ dependencies = [
"flowy-folder",
"flowy-http-model",
"flowy-revision",
"flowy-revision-persistence",
"flowy-sync",
"flowy-test",
"folder-rev-model",
@ -1029,6 +1034,7 @@ dependencies = [
"flowy-grid",
"flowy-http-model",
"flowy-revision",
"flowy-revision-persistence",
"flowy-sync",
"flowy-task",
"flowy-test",
@ -1116,6 +1122,7 @@ dependencies = [
"flowy-error",
"flowy-http-model",
"flowy-revision",
"flowy-revision-persistence",
"futures",
"futures-util",
"lib-infra",
@ -1130,6 +1137,14 @@ dependencies = [
"tracing",
]
[[package]]
name = "flowy-revision-persistence"
version = "0.1.0"
dependencies = [
"flowy-error",
"flowy-http-model",
]
[[package]]
name = "flowy-sync"
version = "0.1.0"
@ -1831,22 +1846,6 @@ dependencies = [
"tracing",
]
[[package]]
name = "lib-sqlite"
version = "0.1.0"
dependencies = [
"diesel",
"diesel_derives",
"diesel_migrations",
"error-chain",
"lazy_static",
"libsqlite3-sys",
"log",
"openssl",
"r2d2",
"scheduled-thread-pool",
]
[[package]]
name = "lib-ws"
version = "0.1.0"

View File

@ -2,7 +2,6 @@
members = [
"lib-dispatch",
"lib-log",
"lib-sqlite",
"flowy-net",
"flowy-core",
"dart-ffi",
@ -14,6 +13,7 @@ members = [
"flowy-document",
"flowy-error",
"flowy-revision",
"flowy-revision-persistence",
"flowy-grid",
"flowy-task",
"flowy-sync",

View File

@ -31,8 +31,9 @@ dart-notify = { path = "../dart-notify" }
flowy-derive = { path = "../flowy-derive" }
[features]
default = ["flowy-core/dart", "flutter"]
default = ["flowy-core/dart", "flutter", "rev-sqlite"]
flutter = []
rev-sqlite = ["flowy-core/rev-sqlite"]
http_sync = ["flowy-core/http_sync", "flowy-core/use_bunyan"]
openssl_vendored = ["flowy-core/openssl_vendored"]

View File

@ -13,7 +13,7 @@ flowy-net = { path = "../flowy-net" }
flowy-folder = { path = "../flowy-folder", default-features = false }
flowy-grid = { path = "../flowy-grid", default-features = false }
grid-rev-model = { path = "../../../shared-lib/grid-rev-model" }
flowy-database = { path = "../flowy-database" }
flowy-database = { path = "../flowy-database", optional = true }
flowy-document = { path = "../flowy-document", default-features = false }
flowy-revision = { path = "../flowy-revision" }
flowy-task = { path = "../flowy-task" }
@ -39,4 +39,11 @@ dart = [
"flowy-grid/dart",
"flowy-document/dart",
]
rev-sqlite = [
"flowy-database",
"flowy-user/rev-sqlite",
"flowy-folder/rev-sqlite",
"flowy-grid/rev-sqlite",
"flowy-document/rev-sqlite",
]
openssl_vendored = ["flowy-database/openssl_vendored"]

View File

@ -79,7 +79,7 @@ impl FlowySDKConfig {
fn crate_log_filter(level: String) -> String {
let level = std::env::var("RUST_LOG").unwrap_or(level);
let mut filters = vec![];
filters.push(format!("flowy_sdk={}", level));
filters.push(format!("flowy_core={}", level));
filters.push(format!("flowy_folder={}", level));
filters.push(format!("flowy_user={}", level));
filters.push(format!("flowy_document={}", level));
@ -95,7 +95,7 @@ fn crate_log_filter(level: String) -> String {
// filters.push(format!("lib_dispatch={}", level));
filters.push(format!("dart_ffi={}", "info"));
filters.push(format!("flowy_database={}", level));
filters.push(format!("flowy_database={}", "info"));
filters.push(format!("flowy_net={}", "info"));
filters.join(",")
}

View File

@ -9,9 +9,14 @@ edition = "2018"
diesel = { version = "1.4.8", features = ["sqlite"] }
diesel_derives = { version = "1.4.1", features = ["sqlite"] }
diesel_migrations = { version = "1.4.0", features = ["sqlite"] }
lib-sqlite = { path = "../lib-sqlite" }
tracing = { version = "0.1", features = ["log"] }
lazy_static = "1.4.0"
r2d2 = "0.8.9"
libsqlite3-sys = { version = ">=0.8.0, <0.24.0", features = ["bundled"] }
scheduled-thread-pool = "0.2.5"
error-chain = "=0.12.0"
openssl = { version = "0.10.38", optional = true }
[features]
openssl_vendored = ["lib-sqlite/openssl_vendored"]
openssl_vendored = ["openssl/vendored"]

View File

@ -1,8 +1,8 @@
use crate::kv::schema::{kv_table, kv_table::dsl, KV_SQL};
use crate::sqlite::{DBConnection, Database, PoolConfig};
use ::diesel::{query_dsl::*, ExpressionMethods};
use diesel::{Connection, SqliteConnection};
use lazy_static::lazy_static;
use lib_sqlite::{DBConnection, Database, PoolConfig};
use std::{path::Path, sync::RwLock};
macro_rules! impl_get_func {

View File

@ -3,13 +3,16 @@ pub use diesel_derives::*;
use diesel_migrations::*;
use std::{fmt::Debug, io, path::Path};
pub mod kv;
mod sqlite;
use crate::sqlite::PoolConfig;
pub use crate::sqlite::{ConnectionPool, DBConnection, Database};
use lib_sqlite::PoolConfig;
pub use lib_sqlite::{ConnectionPool, DBConnection, Database};
pub mod schema;
#[macro_use]
pub mod macros;
#[macro_use]
extern crate diesel;
#[macro_use]
@ -41,7 +44,7 @@ pub fn init(storage_path: &str) -> Result<Database, io::Error> {
fn as_io_error<E>(e: E) -> io::Error
where
E: Into<lib_sqlite::Error> + Debug,
E: Into<crate::sqlite::Error> + Debug,
{
let msg = format!("{:?}", e);
io::Error::new(io::ErrorKind::NotConnected, msg)

View File

@ -1,4 +1,4 @@
use crate::errors::*;
use crate::sqlite::errors::*;
use diesel::{dsl::sql, expression::SqlLiteral, query_dsl::LoadQuery, Connection, RunQueryDsl, SqliteConnection};
pub trait ConnectionExtension: Connection {

View File

@ -1,4 +1,4 @@
use crate::{
use crate::sqlite::{
errors::*,
pool::{ConnectionManager, ConnectionPool, PoolConfig},
};
@ -17,7 +17,7 @@ impl Database {
let uri = db_file_uri(dir, name);
if !std::path::PathBuf::from(dir).exists() {
log::error!("Create database failed. {} not exists", &dir);
tracing::error!("Create database failed. {} not exists", &dir);
}
let pool = ConnectionPool::new(pool_config, &uri)?;

View File

@ -1,4 +1,4 @@
use crate::{errors::*, pragma::*};
use crate::sqlite::{errors::*, pragma::*};
use diesel::{connection::Connection, SqliteConnection};
use r2d2::{CustomizeConnection, ManageConnection, Pool};
use scheduled_thread_pool::ScheduledThreadPool;
@ -45,6 +45,7 @@ impl ConnectionPool {
}
}
#[allow(dead_code)]
pub type OnExecFunc = Box<dyn Fn() -> Box<dyn Fn(&SqliteConnection, &str)> + Send + Sync>;
pub struct PoolConfig {
@ -85,7 +86,7 @@ pub struct ConnectionManager {
impl ManageConnection for ConnectionManager {
type Connection = SqliteConnection;
type Error = crate::Error;
type Error = crate::sqlite::Error;
fn connect(&self) -> Result<Self::Connection> {
Ok(SqliteConnection::establish(&self.db_uri)?)
@ -140,7 +141,7 @@ impl DatabaseCustomizer {
}
}
impl CustomizeConnection<SqliteConnection, crate::Error> for DatabaseCustomizer {
impl CustomizeConnection<SqliteConnection, crate::sqlite::Error> for DatabaseCustomizer {
fn on_acquire(&self, conn: &mut SqliteConnection) -> Result<()> {
conn.pragma_set_busy_timeout(self.config.busy_timeout)?;
if self.config.journal_mode != SQLiteJournalMode::WAL {

View File

@ -1,5 +1,5 @@
#![allow(clippy::upper_case_acronyms)]
use crate::errors::{Error, Result};
use crate::sqlite::errors::{Error, Result};
use diesel::{
expression::SqlLiteral,
query_dsl::load_dsl::LoadQuery,
@ -7,7 +7,7 @@ use diesel::{
SqliteConnection,
};
use crate::conn_ext::ConnectionExtension;
use crate::sqlite::conn_ext::ConnectionExtension;
use std::{
convert::{TryFrom, TryInto},
fmt,
@ -20,7 +20,7 @@ pub trait PragmaExtension: ConnectionExtension {
Some(schema) => format!("PRAGMA {}.{} = '{}'", schema, key, val),
None => format!("PRAGMA {} = '{}'", key, val),
};
log::trace!("SQLITE {}", query);
tracing::trace!("SQLITE {}", query);
self.exec(&query)?;
Ok(())
}
@ -33,7 +33,7 @@ pub trait PragmaExtension: ConnectionExtension {
Some(schema) => format!("PRAGMA {}.{} = '{}'", schema, key, val),
None => format!("PRAGMA {} = '{}'", key, val),
};
log::trace!("SQLITE {}", query);
tracing::trace!("SQLITE {}", query);
self.query::<ST, T>(&query)
}
@ -45,7 +45,7 @@ pub trait PragmaExtension: ConnectionExtension {
Some(schema) => format!("PRAGMA {}.{}", schema, key),
None => format!("PRAGMA {}", key),
};
log::trace!("SQLITE {}", query);
tracing::trace!("SQLITE {}", query);
self.query::<ST, T>(&query)
}

View File

@ -15,8 +15,9 @@ lib-ws = { path = "../../../shared-lib/lib-ws" }
lib-infra = { path = "../../../shared-lib/lib-infra" }
lib-dispatch = { path = "../lib-dispatch" }
flowy-database = { path = "../flowy-database" }
flowy-database = { path = "../flowy-database", optional = true }
flowy-revision = { path = "../flowy-revision" }
flowy-revision-persistence = { path = "../flowy-revision-persistence" }
flowy-error = { path = "../flowy-error", features = ["collaboration", "ot", "http_server", "serde", "db"] }
dart-notify = { path = "../dart-notify" }
@ -58,5 +59,6 @@ flowy-codegen = { path = "../flowy-codegen"}
[features]
sync = []
cloud_sync = ["sync"]
rev-sqlite = ["flowy-database"]
flowy_unit_test = ["lib-ot/flowy_unit_test", "flowy-revision/flowy_unit_test"]
dart = ["flowy-codegen/dart", "dart-notify/dart"]

View File

@ -6,7 +6,7 @@ use flowy_database::kv::KV;
use flowy_error::FlowyResult;
use flowy_http_model::revision::Revision;
use flowy_http_model::util::md5;
use flowy_revision::disk::{RevisionDiskCache, SyncRecord};
use flowy_revision_persistence::{RevisionDiskCache, SyncRecord};
use flowy_sync::util::make_operations_from_revisions;
use std::sync::Arc;

View File

@ -11,7 +11,7 @@ use flowy_http_model::{
revision::{Revision, RevisionRange},
util::md5,
};
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::collections::HashMap;
use std::sync::Arc;
@ -25,7 +25,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
DeltaRevisionSql::create(revision_records, &*conn)?;
DeltaRevisionSql::create(revision_records, &conn)?;
Ok(())
}
@ -57,7 +57,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = DeltaRevisionSql::update(changeset, conn)?;
DeltaRevisionSql::update(changeset, conn)?;
}
Ok(())
})?;
@ -78,8 +78,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
DeltaRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
DeltaRevisionSql::create(inserted_records, &*conn)?;
DeltaRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
DeltaRevisionSql::create(inserted_records, &conn)?;
Ok(())
})
}

View File

@ -11,7 +11,7 @@ use flowy_http_model::{
revision::{Revision, RevisionRange},
util::md5,
};
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::sync::Arc;
pub struct SQLiteDocumentRevisionPersistence {
@ -24,7 +24,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
DocumentRevisionSql::create(revision_records, &*conn)?;
DocumentRevisionSql::create(revision_records, &conn)?;
Ok(())
}
@ -56,7 +56,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = DocumentRevisionSql::update(changeset, conn)?;
DocumentRevisionSql::update(changeset, conn)?;
}
Ok(())
})?;
@ -77,8 +77,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
DocumentRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
DocumentRevisionSql::create(inserted_records, &*conn)?;
DocumentRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
DocumentRevisionSql::create(inserted_records, &conn)?;
Ok(())
})
}

View File

@ -1,5 +1,5 @@
use crate::old_document::script::{EditorScript::*, *};
use flowy_revision::disk::RevisionState;
use flowy_revision_persistence::RevisionState;
use lib_ot::core::{count_utf16_code_units, Interval};
#[tokio::test]

View File

@ -1,6 +1,6 @@
use flowy_document::old_editor::editor::DeltaDocumentEditor;
use flowy_document::TEXT_BLOCK_SYNC_INTERVAL_IN_MILLIS;
use flowy_revision::disk::RevisionState;
use flowy_revision_persistence::RevisionState;
use flowy_test::{helper::ViewTest, FlowySDKTest};
use lib_ot::{core::Interval, text_delta::DeltaTextOperations};
use std::sync::Arc;

View File

@ -20,14 +20,13 @@ serde_json = {version = "1.0", optional = true}
http-flowy = { git = "https://github.com/AppFlowy-IO/AppFlowy-Server", optional = true}
flowy-database = { path = "../flowy-database", optional = true}
r2d2 = { version = "0.8", optional = true}
lib-sqlite = { path = "../lib-sqlite", optional = true }
[features]
collaboration = ["flowy-sync"]
ot = ["lib-ot"]
serde = ["serde_json"]
http_server = ["http-flowy"]
db = ["flowy-database", "lib-sqlite", "r2d2"]
db = ["flowy-database", "r2d2"]
dart = ["flowy-codegen/dart"]
[build-dependencies]

View File

@ -11,11 +11,3 @@ impl std::convert::From<::r2d2::Error> for FlowyError {
FlowyError::internal().context(error)
}
}
// use diesel::result::{Error, DatabaseErrorKind};
// use lib_sqlite::ErrorKind;
impl std::convert::From<lib_sqlite::Error> for FlowyError {
fn from(error: lib_sqlite::Error) -> Self {
FlowyError::internal().context(error)
}
}

View File

@ -14,11 +14,12 @@ lib-ot = { path = "../../../shared-lib/lib-ot" }
lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-document = { path = "../flowy-document" }
flowy-database = { path = "../flowy-database" }
flowy-database = { path = "../flowy-database", optional = true }
flowy-error = { path = "../flowy-error", features = ["db", "http_server"]}
dart-notify = { path = "../dart-notify" }
lib-dispatch = { path = "../lib-dispatch" }
flowy-revision = { path = "../flowy-revision" }
flowy-revision-persistence = { path = "../flowy-revision-persistence" }
parking_lot = "0.12.1"
protobuf = {version = "2.18.0"}
@ -49,5 +50,6 @@ flowy-codegen = { path = "../flowy-codegen"}
default = []
sync = []
cloud_sync = ["sync"]
rev-sqlite = ["flowy-database", "flowy-folder/rev-sqlite"]
flowy_unit_test = ["lib-ot/flowy_unit_test", "flowy-revision/flowy_unit_test"]
dart = ["flowy-codegen/dart", "dart-notify/dart"]

View File

@ -105,7 +105,7 @@ impl std::default::Default for TrashType {
}
}
#[derive(PartialEq, ProtoBuf, Default, Debug, Clone)]
#[derive(PartialEq, Eq, ProtoBuf, Default, Debug, Clone)]
pub struct RepeatedTrashIdPB {
#[pb(index = 1)]
pub items: Vec<TrashIdPB>,

View File

@ -52,8 +52,8 @@ impl AppController {
pub(crate) async fn create_app_on_local(&self, app: AppRevision) -> Result<AppPB, FlowyError> {
self.persistence
.begin_transaction(|transaction| {
let _ = transaction.create_app(app.clone())?;
let _ = notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?;
transaction.create_app(app.clone())?;
notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?;
Ok(())
})
.await?;
@ -101,9 +101,9 @@ impl AppController {
pub(crate) async fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> {
self.persistence
.begin_transaction(|transaction| {
let _ = transaction.move_app(app_id, from, to)?;
transaction.move_app(app_id, from, to)?;
let app = transaction.read_app(app_id)?;
let _ = notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?;
notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?;
Ok(())
})
.await?;

View File

@ -3,6 +3,7 @@ pub mod rev_sqlite;
pub mod version_1;
mod version_2;
use crate::services::persistence::rev_sqlite::SQLiteFolderRevisionPersistence;
use crate::{
event_map::WorkspaceDatabase,
manager::FolderId,
@ -11,12 +12,10 @@ use crate::{
use flowy_database::ConnectionPool;
use flowy_error::{FlowyError, FlowyResult};
use flowy_http_model::revision::Revision;
use flowy_revision::disk::{RevisionDiskCache, RevisionState, SyncRecord};
use flowy_revision_persistence::{RevisionDiskCache, RevisionState, SyncRecord};
use flowy_sync::client_folder::FolderPad;
use folder_rev_model::{AppRevision, TrashRevision, ViewRevision, WorkspaceRevision};
use crate::services::persistence::rev_sqlite::SQLiteFolderRevisionPersistence;
use flowy_sync::server_folder::FolderOperationsBuilder;
use folder_rev_model::{AppRevision, TrashRevision, ViewRevision, WorkspaceRevision};
use std::sync::Arc;
use tokio::sync::RwLock;
pub use version_1::{app_sql::*, trash_sql::*, v1_impl::V1Transaction, view_sql::*, workspace_sql::*};

View File

@ -9,7 +9,7 @@ use flowy_database::{
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_http_model::util::md5;
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::sync::Arc;
pub struct SQLiteFolderRevisionPersistence {
@ -22,7 +22,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
FolderRevisionSql::create(revision_records, &*conn)?;
FolderRevisionSql::create(revision_records, &conn)?;
Ok(())
}
@ -54,7 +54,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = FolderRevisionSql::update(changeset, conn)?;
FolderRevisionSql::update(changeset, conn)?;
}
Ok(())
})?;
@ -75,8 +75,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
FolderRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
FolderRevisionSql::create(inserted_records, &*conn)?;
FolderRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
FolderRevisionSql::create(inserted_records, &conn)?;
Ok(())
})
}

View File

@ -1,3 +1,4 @@
#![allow(clippy::unused_unit)]
use bytes::Bytes;
use flowy_database::{
prelude::*,

View File

@ -15,7 +15,7 @@ pub struct V1Transaction<'a>(pub &'a DBConnection);
impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
WorkspaceTableSql::create_workspace(user_id, workspace_rev, &*self.0)?;
WorkspaceTableSql::create_workspace(user_id, workspace_rev, self.0)?;
Ok(())
}
@ -34,12 +34,12 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
}
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
AppTableSql::create_app(app_rev, &*self.0)?;
AppTableSql::create_app(app_rev, self.0)?;
Ok(())
}
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
AppTableSql::update_app(changeset, &*self.0)?;
AppTableSql::update_app(changeset, self.0)?;
Ok(())
}
@ -64,7 +64,7 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
}
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
ViewTableSql::create_view(view_rev, &*self.0)?;
ViewTableSql::create_view(view_rev, self.0)?;
Ok(())
}
@ -80,13 +80,13 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
}
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
ViewTableSql::update_view(changeset, &*self.0)?;
ViewTableSql::update_view(changeset, self.0)?;
Ok(())
}
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view_revision: ViewRevision = ViewTableSql::read_view(view_id, self.0)?.into();
ViewTableSql::delete_view(view_id, &*self.0)?;
ViewTableSql::delete_view(view_id, self.0)?;
Ok(view_revision)
}
@ -95,7 +95,7 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
}
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
TrashTableSql::create_trash(trashes, &*self.0)?;
TrashTableSql::create_trash(trashes, self.0)?;
Ok(())
}
@ -114,7 +114,7 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
None => TrashTableSql::delete_all(self.0),
Some(trash_ids) => {
for trash_id in &trash_ids {
TrashTableSql::delete_trash(trash_id, &*self.0)?;
TrashTableSql::delete_trash(trash_id, self.0)?;
}
Ok(())
}

View File

@ -168,7 +168,7 @@ impl TrashController {
self.persistence
.begin_transaction(|transaction| {
let _ = transaction.create_trash(trash_revs.clone())?;
transaction.create_trash(trash_revs.clone())?;
let _ = self.create_trash_on_server(trash_revs);
notify_trash_changed(transaction.read_trash(None)?);

View File

@ -238,9 +238,9 @@ impl ViewController {
pub(crate) async fn move_view(&self, view_id: &str, from: usize, to: usize) -> Result<(), FlowyError> {
self.persistence
.begin_transaction(|transaction| {
let _ = transaction.move_view(view_id, from, to)?;
transaction.move_view(view_id, from, to)?;
let view = transaction.read_view(view_id)?;
let _ = notify_views_changed(&view.app_id, self.trash_controller.clone(), &transaction)?;
notify_views_changed(&view.app_id, self.trash_controller.clone(), &transaction)?;
Ok(())
})
.await?;

View File

@ -122,7 +122,7 @@ fn read_workspaces_on_server(
for workspace_rev in &workspace_revs {
let m_workspace = workspace_rev.clone();
let app_revs = m_workspace.apps.clone();
let _ = transaction.create_workspace(&user_id, m_workspace)?;
transaction.create_workspace(&user_id, m_workspace)?;
tracing::trace!("Save {} apps", app_revs.len());
for app_rev in app_revs {
let view_revs = app_rev.belongings.clone();

View File

@ -1,7 +1,7 @@
use crate::script::{invalid_workspace_name_test_case, FolderScript::*, FolderTest};
use flowy_folder::entities::view::ViewDataFormatPB;
use flowy_folder::entities::workspace::CreateWorkspacePayloadPB;
use flowy_revision::disk::RevisionState;
use flowy_revision_persistence::RevisionState;
use flowy_test::{event_builder::*, FlowySDKTest};
#[tokio::test]

View File

@ -15,8 +15,8 @@ use flowy_folder::entities::{
};
use flowy_folder::event_map::FolderEvent::*;
use flowy_folder::{errors::ErrorCode, services::folder_editor::FolderEditor};
use flowy_revision::disk::RevisionState;
use flowy_revision::REVISION_WRITE_INTERVAL_IN_MILLIS;
use flowy_revision_persistence::RevisionState;
use flowy_test::{event_builder::*, FlowySDKTest};
use std::{sync::Arc, time::Duration};
use tokio::time::sleep;
@ -259,13 +259,12 @@ pub async fn create_workspace(sdk: &FlowySDKTest, name: &str, desc: &str) -> Wor
desc: desc.to_owned(),
};
let workspace = FolderEventBuilder::new(sdk.clone())
FolderEventBuilder::new(sdk.clone())
.event(CreateWorkspace)
.payload(request)
.async_send()
.await
.parse::<WorkspacePB>();
workspace
.parse::<WorkspacePB>()
}
pub async fn read_workspace(sdk: &FlowySDKTest, workspace_id: Option<String>) -> Vec<WorkspacePB> {
@ -300,13 +299,12 @@ pub async fn create_app(sdk: &FlowySDKTest, workspace_id: &str, name: &str, desc
color_style: Default::default(),
};
let app = FolderEventBuilder::new(sdk.clone())
FolderEventBuilder::new(sdk.clone())
.event(CreateApp)
.payload(create_app_request)
.async_send()
.await
.parse::<AppPB>();
app
.parse::<AppPB>()
}
pub async fn read_app(sdk: &FlowySDKTest, app_id: &str) -> AppPB {
@ -314,14 +312,12 @@ pub async fn read_app(sdk: &FlowySDKTest, app_id: &str) -> AppPB {
value: app_id.to_owned(),
};
let app = FolderEventBuilder::new(sdk.clone())
FolderEventBuilder::new(sdk.clone())
.event(ReadApp)
.payload(request)
.async_send()
.await
.parse::<AppPB>();
app
.parse::<AppPB>()
}
pub async fn update_app(sdk: &FlowySDKTest, app_id: &str, name: Option<String>, desc: Option<String>) {
@ -369,13 +365,12 @@ pub async fn create_view(
layout,
view_content_data: vec![],
};
let view = FolderEventBuilder::new(sdk.clone())
FolderEventBuilder::new(sdk.clone())
.event(CreateView)
.payload(request)
.async_send()
.await
.parse::<ViewPB>();
view
.parse::<ViewPB>()
}
pub async fn read_view(sdk: &FlowySDKTest, view_id: &str) -> ViewPB {

View File

@ -9,6 +9,7 @@ edition = "2021"
lib-dispatch = { path = "../lib-dispatch" }
dart-notify = { path = "../dart-notify" }
flowy-revision = { path = "../flowy-revision" }
flowy-revision-persistence = { path = "../flowy-revision-persistence" }
flowy-task= { path = "../flowy-task" }
flowy-error = { path = "../flowy-error", features = ["db"]}
flowy-derive = { path = "../flowy-derive" }
@ -17,7 +18,7 @@ lib-infra = { path = "../../../shared-lib/lib-infra" }
grid-rev-model = { path = "../../../shared-lib/grid-rev-model" }
flowy-sync = { path = "../flowy-sync"}
flowy-http-model = { path = "../../../shared-lib/flowy-http-model" }
flowy-database = { path = "../flowy-database" }
flowy-database = { path = "../flowy-database", optional = true }
anyhow = "1.0"
strum = "0.21"
@ -56,5 +57,6 @@ flowy-codegen = { path = "../flowy-codegen"}
[features]
default = []
rev-sqlite = ["flowy-database"]
dart = ["flowy-codegen/dart", "dart-notify/dart"]
flowy_unit_test = ["flowy-revision/flowy_unit_test"]

View File

@ -182,7 +182,7 @@ impl CellDataChangeset for NumberTypeOptionPB {
let number_cell_data = self.format_cell_data(&data)?;
match self.format {
NumberFormat::Num => Ok((number_cell_data.to_string().into(), number_cell_data.to_string().into())),
NumberFormat::Num => Ok((number_cell_data.to_string(), number_cell_data.to_string().into())),
_ => Ok((data, number_cell_data.to_string().into())),
}
}

View File

@ -9,7 +9,7 @@ use flowy_database::{
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_http_model::util::md5;
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::sync::Arc;
pub struct SQLiteGridBlockRevisionPersistence {
@ -22,7 +22,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
GridMetaRevisionSql::create(revision_records, &*conn)?;
GridMetaRevisionSql::create(revision_records, &conn)?;
Ok(())
}
@ -54,7 +54,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = GridMetaRevisionSql::update(changeset, conn)?;
GridMetaRevisionSql::update(changeset, conn)?;
}
Ok(())
})?;
@ -75,8 +75,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
GridMetaRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
GridMetaRevisionSql::create(inserted_records, &*conn)?;
GridMetaRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
GridMetaRevisionSql::create(inserted_records, &conn)?;
Ok(())
})
}

View File

@ -1,3 +1,4 @@
#![allow(clippy::unused_unit)]
use bytes::Bytes;
use flowy_database::{
prelude::*,

View File

@ -9,7 +9,7 @@ use flowy_database::{
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_http_model::util::md5;
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::sync::Arc;
pub struct SQLiteGridRevisionPersistence {
@ -22,7 +22,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
GridRevisionSql::create(revision_records, &*conn)?;
GridRevisionSql::create(revision_records, &conn)?;
Ok(())
}
@ -54,7 +54,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = GridRevisionSql::update(changeset, conn)?;
GridRevisionSql::update(changeset, conn)?;
}
Ok(())
})?;
@ -75,8 +75,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
GridRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
GridRevisionSql::create(inserted_records, &*conn)?;
GridRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
GridRevisionSql::create(inserted_records, &conn)?;
Ok(())
})
}

View File

@ -9,7 +9,7 @@ use flowy_database::{
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_http_model::util::md5;
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::sync::Arc;
pub struct SQLiteGridViewRevisionPersistence {
@ -31,7 +31,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
GridViewRevisionSql::create(revision_records, &*conn)?;
GridViewRevisionSql::create(revision_records, &conn)?;
Ok(())
}
@ -63,7 +63,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = GridViewRevisionSql::update(changeset, conn)?;
GridViewRevisionSql::update(changeset, conn)?;
}
Ok(())
})?;
@ -84,8 +84,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
GridViewRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
GridViewRevisionSql::create(inserted_records, &*conn)?;
GridViewRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
GridViewRevisionSql::create(inserted_records, &conn)?;
Ok(())
})
}

View File

@ -94,9 +94,8 @@ async fn url_cell_date_test() {
for (i, cell) in cells.iter().enumerate() {
let url_cell_data = cell.get_url_field_cell_data().unwrap();
match i {
0 => assert_eq!(url_cell_data.url.as_str(), "https://www.appflowy.io/"),
_ => {}
if i == 0 {
assert_eq!(url_cell_data.url.as_str(), "https://www.appflowy.io/")
}
}
}

View File

@ -47,9 +47,7 @@ impl GridSnapshotTest {
}
pub async fn grid_pad(&self) -> GridRevisionPad {
let pad = self.editor.grid_pad();
let pad = (*pad.read().await).clone();
pad
self.editor.grid_pad().read().await.clone()
}
pub async fn run_scripts(&mut self, scripts: Vec<SnapshotScript>) {

View File

@ -48,7 +48,8 @@ impl LocalServer {
}
pub async fn stop(&self) {
if let Some(stop_tx) = self.stop_tx.read().clone() {
let sender = self.stop_tx.read().clone();
if let Some(stop_tx) = sender {
let _ = stop_tx.send(()).await;
}
}

View File

@ -0,0 +1,13 @@
[package]
name = "flowy-revision-persistence"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
flowy-error = { path = "../flowy-error" }
flowy-http-model = { path = "../../../shared-lib/flowy-http-model" }
[features]
# Enables the file-based revision persistence backend
# (gates `pub mod file_persistence` in src/lib.rs).
rev-file = []

View File

@ -0,0 +1,54 @@
use crate::{RevisionChangeset, RevisionDiskCache, SyncRecord};
use flowy_error::FlowyResult;
use flowy_http_model::revision::RevisionRange;
pub struct FileRevisionDiskCache {
path: String,
}
pub type FileRevisionDiskCacheConnection = ();
impl RevisionDiskCache<FileRevisionDiskCacheConnection> for FileRevisionDiskCache {
type Error = ();
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
Ok(())
}
fn get_connection(&self) -> Result<FileRevisionDiskCacheConnection, Self::Error> {
return Ok(());
}
fn read_revision_records(
&self,
object_id: &str,
rev_ids: Option<Vec<i64>>,
) -> Result<Vec<SyncRecord>, Self::Error> {
Ok(vec![])
}
fn read_revision_records_with_range(
&self,
object_id: &str,
range: &RevisionRange,
) -> Result<Vec<SyncRecord>, Self::Error> {
Ok(vec![])
}
fn update_revision_record(&self, changesets: Vec<RevisionChangeset>) -> FlowyResult<()> {
Ok(())
}
fn delete_revision_records(&self, object_id: &str, rev_ids: Option<Vec<i64>>) -> Result<(), Self::Error> {
Ok(())
}
fn delete_and_insert_records(
&self,
object_id: &str,
deleted_rev_ids: Option<Vec<i64>>,
inserted_records: Vec<SyncRecord>,
) -> Result<(), Self::Error> {
todo!()
}
}

View File

@ -0,0 +1,2 @@
// File-based revision persistence, compiled only when the crate's
// `rev-file` feature is enabled (declared in Cargo.toml).
#[cfg(feature = "rev-file")]
pub mod file_persistence;

View File

@ -1,3 +1,5 @@
mod disk_cache_impl;
use flowy_error::{FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange};
use std::fmt::Debug;

View File

@ -10,6 +10,7 @@ flowy-http-model = { path = "../../../shared-lib/flowy-http-model" }
lib-ws = { path = "../../../shared-lib/lib-ws" }
lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-error = { path = "../flowy-error" }
flowy-revision-persistence= { path = "../flowy-revision-persistence" }
tracing = { version = "0.1", features = ["log"] }
tokio = {version = "1", features = ["sync"]}
bytes = { version = "1.1" }

View File

@ -1,8 +1,8 @@
use crate::disk::SyncRecord;
use crate::REVISION_WRITE_INTERVAL_IN_MILLIS;
use dashmap::DashMap;
use flowy_error::{FlowyError, FlowyResult};
use flowy_http_model::revision::RevisionRange;
use flowy_revision_persistence::SyncRecord;
use std::{borrow::Cow, sync::Arc, time::Duration};
use tokio::{sync::RwLock, task::JoinHandle};

View File

@ -1,3 +1,2 @@
pub mod disk;
pub(crate) mod memory;
pub mod reset;

View File

@ -1,8 +1,8 @@
use crate::disk::{RevisionDiskCache, SyncRecord};
use crate::{RevisionLoader, RevisionPersistence, RevisionPersistenceConfiguration};
use bytes::Bytes;
use flowy_error::{FlowyError, FlowyResult};
use flowy_http_model::revision::Revision;
use flowy_revision_persistence::{RevisionDiskCache, SyncRecord};
use serde::{Deserialize, Serialize};
use std::str::FromStr;
use std::sync::Arc;

View File

@ -286,7 +286,7 @@ impl<Connection: 'static> RevisionManager<Connection> {
pub fn ack_notify(&self) -> tokio::sync::broadcast::Receiver<i64> {
self.rev_ack_notifier.subscribe()
}
pub fn get_all_revision_records(&self) -> FlowyResult<Vec<crate::disk::SyncRecord>> {
pub fn get_all_revision_records(&self) -> FlowyResult<Vec<flowy_revision_persistence::SyncRecord>> {
self.rev_persistence.load_all_records(&self.object_id)
}
}

View File

@ -1,12 +1,9 @@
use crate::cache::{
disk::{RevisionChangeset, RevisionDiskCache},
memory::RevisionMemoryCacheDelegate,
};
use crate::disk::{RevisionState, SyncRecord};
use crate::cache::memory::RevisionMemoryCacheDelegate;
use crate::memory::RevisionMemoryCache;
use crate::RevisionMergeable;
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use std::collections::{HashMap, VecDeque};
use std::{borrow::Cow, sync::Arc};

View File

@ -1,10 +1,10 @@
use bytes::Bytes;
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_revision::disk::{RevisionChangeset, RevisionDiskCache, SyncRecord};
use flowy_revision::{
RevisionManager, RevisionMergeable, RevisionObjectDeserializer, RevisionPersistence,
RevisionPersistenceConfiguration, RevisionSnapshot, RevisionSnapshotDiskCache, REVISION_WRITE_INTERVAL_IN_MILLIS,
};
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, SyncRecord};
use flowy_http_model::revision::{Revision, RevisionRange};
use flowy_http_model::util::md5;

View File

@ -70,13 +70,12 @@ async fn create_workspace(sdk: &FlowySDKTest, name: &str, desc: &str) -> Workspa
desc: desc.to_owned(),
};
let workspace = FolderEventBuilder::new(sdk.clone())
FolderEventBuilder::new(sdk.clone())
.event(CreateWorkspace)
.payload(request)
.async_send()
.await
.parse::<WorkspacePB>();
workspace
.parse::<WorkspacePB>()
}
async fn open_workspace(sdk: &FlowySDKTest, workspace_id: &str) {
@ -98,13 +97,12 @@ async fn create_app(sdk: &FlowySDKTest, name: &str, desc: &str, workspace_id: &s
color_style: Default::default(),
};
let app = FolderEventBuilder::new(sdk.clone())
FolderEventBuilder::new(sdk.clone())
.event(CreateApp)
.payload(create_app_request)
.async_send()
.await
.parse::<AppPB>();
app
.parse::<AppPB>()
}
async fn create_view(
@ -124,13 +122,12 @@ async fn create_view(
view_content_data: data,
};
let view = FolderEventBuilder::new(sdk.clone())
FolderEventBuilder::new(sdk.clone())
.event(CreateView)
.payload(request)
.async_send()
.await
.parse::<ViewPB>();
view
.parse::<ViewPB>()
}
pub fn root_dir() -> String {

View File

@ -45,8 +45,7 @@ impl FlowySDKTest {
}
pub async fn sign_up(&self) -> SignUpContext {
let context = async_sign_up(self.inner.dispatcher()).await;
context
async_sign_up(self.inner.dispatcher()).await
}
pub async fn init_user(&self) -> UserProfilePB {

View File

@ -7,7 +7,7 @@ edition = "2018"
[dependencies]
flowy-derive = { path = "../flowy-derive" }
flowy-database = { path = "../flowy-database" }
flowy-database = { path = "../flowy-database", optional = true }
flowy-error = { path = "../flowy-error", features = ["db", "http_server"] }
lib-infra = { path = "../../../shared-lib/lib-infra" }
@ -45,6 +45,7 @@ rand_core = "0.6.3"
rand = "0.8.5"
[features]
rev-sqlite = ["flowy-database"]
dart = ["flowy-codegen/dart", "dart-notify/dart"]
[build-dependencies]

View File

@ -1,21 +0,0 @@
[package]
name = "lib-sqlite"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
r2d2 = "0.8.9"
libsqlite3-sys = { version = ">=0.8.0, <0.24.0", features = ["bundled"] }
diesel = { version = "1.4.8", features = ["sqlite"] }
diesel_derives = { version = "1.4.1", features = ["sqlite"] }
diesel_migrations = { version = "1.4.0", features = ["sqlite"] }
lazy_static = "1.4.0"
scheduled-thread-pool = "0.2.5"
error-chain = "=0.12.0"
log = "0.4.11"
openssl = { version = "0.10.38", optional = true }
[features]
openssl_vendored = ["openssl/vendored"]

View File

@ -67,8 +67,8 @@ script = [
"""
cd rust-lib/
rustup show
echo cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FEATURES}"
cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FEATURES}"
echo cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cd ../
""",
]
@ -105,7 +105,7 @@ private = true
script = [
"""
cd rust-lib
exec cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FEATURES}"
exec cargo build --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cd ..
""",
]
@ -128,7 +128,7 @@ run_task = { name = [
script = [
"""
cd rust-lib/
cargo build --${BUILD_FLAG} --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FEATURES}"
cargo build --${BUILD_FLAG} --package=dart-ffi --target ${RUST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cd ../
""",
]
@ -216,8 +216,8 @@ script = [
"""
cd rust-lib/
rustup show
echo cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FEATURES}"
RUST_LOG=${RUST_LOG} cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FEATURES}"
echo cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
RUST_LOG=${RUST_LOG} cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cd ../
""",
]
@ -229,8 +229,8 @@ script = [
"""
cd rust-lib/
rustup show
echo cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FEATURES}"
cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FEATURES}"
echo cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cargo build --package=dart-ffi --target ${TEST_COMPILE_TARGET} --features "${FLUTTER_DESKTOP_FEATURES}"
cd ../
""",
]

View File

@ -32,7 +32,7 @@ run_task = { name = ["rust_lib_unit_test", "shared_lib_unit_test"] }
description = "Run rust-lib unit tests"
script = '''
cd rust-lib
cargo test --no-default-features --features="sync"
cargo test --no-default-features --features="sync, rev-sqlite"
'''
[tasks.shared_lib_unit_test]

View File

@ -67,9 +67,9 @@ impl Revision {
impl std::fmt::Debug for Revision {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
let _ = f.write_fmt(format_args!("object_id {}, ", self.object_id))?;
let _ = f.write_fmt(format_args!("base_rev_id {}, ", self.base_rev_id))?;
let _ = f.write_fmt(format_args!("rev_id {}, ", self.rev_id))?;
f.write_fmt(format_args!("object_id {}, ", self.object_id))?;
f.write_fmt(format_args!("base_rev_id {}, ", self.base_rev_id))?;
f.write_fmt(format_args!("rev_id {}, ", self.rev_id))?;
Ok(())
}
}

View File

@ -44,7 +44,7 @@ impl ClientRevisionWSData {
object_id: object_id.to_owned(),
ty: ClientRevisionWSDataType::ClientPing,
revisions: vec![],
rev_id: rev_id,
rev_id,
}
}
}
@ -61,7 +61,7 @@ impl std::convert::TryFrom<ClientRevisionWSData> for Bytes {
type Error = serde_json::Error;
fn try_from(bytes: ClientRevisionWSData) -> Result<Self, Self::Error> {
serde_json::to_vec(&bytes).map(|bytes| Bytes::from(bytes))
serde_json::to_vec(&bytes).map(Bytes::from)
}
}
@ -91,7 +91,7 @@ impl std::convert::TryFrom<ServerRevisionWSData> for Bytes {
type Error = serde_json::Error;
fn try_from(bytes: ServerRevisionWSData) -> Result<Self, Self::Error> {
serde_json::to_vec(&bytes).map(|bytes| Bytes::from(bytes))
serde_json::to_vec(&bytes).map(Bytes::from)
}
}

View File

@ -124,7 +124,7 @@ impl AttributeHashMap {
impl Display for AttributeHashMap {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for (key, value) in self.0.iter() {
let _ = f.write_str(&format!("{:?}:{:?}", key, value))?;
f.write_str(&format!("{:?}:{:?}", key, value))?;
}
Ok(())
}

View File

@ -206,7 +206,7 @@ pub struct OpMetric();
impl Metric for OpMetric {
fn seek<T: OperationAttributes>(cursor: &mut OperationsCursor<T>, op_offset: usize) -> SeekResult {
let _ = check_bound(cursor.op_offset, op_offset)?;
check_bound(cursor.op_offset, op_offset)?;
let mut seek_cursor = OperationsCursor::new(cursor.delta, cursor.origin_iv);
while let Some((_, op)) = seek_cursor.iter.next() {
@ -226,7 +226,7 @@ pub struct Utf16CodeUnitMetric();
impl Metric for Utf16CodeUnitMetric {
fn seek<T: OperationAttributes>(cursor: &mut OperationsCursor<T>, offset: usize) -> SeekResult {
if offset > 0 {
let _ = check_bound(cursor.consume_count, offset)?;
check_bound(cursor.consume_count, offset)?;
let _ = cursor.next_with_len(Some(offset));
}

View File

@ -113,9 +113,9 @@ where
{
let len = false as usize + 1 + if self.attributes.is_empty() { 0 } else { 1 };
let mut serde_state = serializer.serialize_struct("Retain", len)?;
let _ = serde::ser::SerializeStruct::serialize_field(&mut serde_state, "retain", &self.n)?;
serde::ser::SerializeStruct::serialize_field(&mut serde_state, "retain", &self.n)?;
if !self.attributes.is_empty() {
let _ = serde::ser::SerializeStruct::serialize_field(&mut serde_state, "attributes", &self.attributes)?;
serde::ser::SerializeStruct::serialize_field(&mut serde_state, "attributes", &self.attributes)?;
}
serde::ser::SerializeStruct::end(serde_state)
}
@ -216,9 +216,9 @@ where
{
let len = false as usize + 1 + if self.attributes.is_empty() { 0 } else { 1 };
let mut serde_state = serializer.serialize_struct("Insert", len)?;
let _ = serde::ser::SerializeStruct::serialize_field(&mut serde_state, "insert", &self.s)?;
serde::ser::SerializeStruct::serialize_field(&mut serde_state, "insert", &self.s)?;
if !self.attributes.is_empty() {
let _ = serde::ser::SerializeStruct::serialize_field(&mut serde_state, "attributes", &self.attributes)?;
serde::ser::SerializeStruct::serialize_field(&mut serde_state, "attributes", &self.attributes)?;
}
serde::ser::SerializeStruct::end(serde_state)
}

View File

@ -12,7 +12,7 @@ impl Serialize for NodeOperations {
let operations = self.values();
let mut seq = serializer.serialize_seq(Some(operations.len()))?;
for operation in operations {
let _ = seq.serialize_element(&operation)?;
seq.serialize_element(&operation)?;
}
seq.end()
}

View File

@ -249,18 +249,7 @@ impl TransactionBuilder {
}
fn get_deleted_node_data(&self, node_tree: &NodeTree, node_id: NodeId) -> NodeData {
let node_data = node_tree.get_node(node_id).unwrap();
let mut children = vec![];
node_tree.get_children_ids(node_id).into_iter().for_each(|child_id| {
children.push(self.get_deleted_node_data(node_tree, child_id));
});
NodeData {
node_type: node_data.node_type.clone(),
attributes: node_data.attributes.clone(),
body: node_data.body.clone(),
children,
}
recursive_get_deleted_node_data(node_tree, node_id)
}
pub fn push(mut self, op: NodeOperation) -> Self {
@ -272,3 +261,19 @@ impl TransactionBuilder {
Transaction::from_operations(self.operations)
}
}
/// Clones the node at `node_id` together with its entire subtree into a
/// detached [`NodeData`], recursing depth-first through the children.
///
/// Panics if `node_id` is not present in `node_tree`.
fn recursive_get_deleted_node_data(node_tree: &NodeTree, node_id: NodeId) -> NodeData {
    let node = node_tree.get_node(node_id).unwrap();
    let children: Vec<NodeData> = node_tree
        .get_children_ids(node_id)
        .into_iter()
        .map(|child_id| recursive_get_deleted_node_data(node_tree, child_id))
        .collect();
    NodeData {
        node_type: node.node_type.clone(),
        attributes: node.attributes.clone(),
        body: node.body.clone(),
        children,
    }
}

View File

@ -31,7 +31,7 @@ impl NodeTree {
pub fn from_node_data(node_data: NodeData, context: NodeTreeContext) -> Result<Self, OTError> {
let mut tree = Self::new(context);
let _ = tree.insert_nodes(&0_usize.into(), vec![node_data])?;
tree.insert_nodes(&0_usize.into(), vec![node_data])?;
Ok(tree)
}
@ -68,7 +68,7 @@ impl NodeTree {
let operations = operations.into();
let mut node_tree = NodeTree::new(context);
for (_, operation) in operations.into_inner().into_iter().enumerate() {
let _ = node_tree.apply_op(operation)?;
node_tree.apply_op(operation)?;
}
Ok(node_tree)
}
@ -76,7 +76,7 @@ impl NodeTree {
pub fn from_transaction<T: Into<Transaction>>(transaction: T, context: NodeTreeContext) -> Result<Self, OTError> {
let transaction = transaction.into();
let mut tree = Self::new(context);
let _ = tree.apply_transaction(transaction)?;
tree.apply_transaction(transaction)?;
Ok(tree)
}
@ -473,7 +473,7 @@ impl NodeTree {
None => tracing::warn!("The path: {:?} does not contain any nodes", path),
Some(node) => {
let node = node.get_mut();
let _ = f(node)?;
f(node)?;
}
}
Ok(())

View File

@ -26,7 +26,7 @@ impl Serialize for NodeTree {
let mut seq = serializer.serialize_seq(Some(children.len()))?;
for child in children {
if let Some(child_node_data) = self.get_node_data(child) {
let _ = seq.serialize_element(&child_node_data)?;
seq.serialize_element(&child_node_data)?;
}
}
seq.end()

View File

@ -4,7 +4,7 @@ macro_rules! inline_attribute_entry {
$key: ident,
$value: ty
) => {
pub fn $key(value: $value) -> crate::core::AttributeEntry {
pub fn $key(value: $value) -> $crate::core::AttributeEntry {
AttributeEntry {
key: BuildInTextAttributeKey::$key.as_ref().to_string(),
value: value.into(),
@ -19,7 +19,7 @@ macro_rules! inline_list_attribute_entry {
$key: ident,
$value: expr
) => {
pub fn $key(b: bool) -> crate::core::AttributeEntry {
pub fn $key(b: bool) -> $crate::core::AttributeEntry {
let value = match b {
true => $value,
false => "",

View File

@ -10,7 +10,7 @@ fn transaction_compose_update_after_insert_test() {
// Modify the same path, the operations will be merged after composing if possible.
let mut transaction_a = TransactionBuilder::new().insert_node_at_path(0, node_data).build();
let transaction_b = TransactionBuilder::new().update_node_at_path(0, changeset).build();
let _ = transaction_a.compose(transaction_b).unwrap();
transaction_a.compose(transaction_b).unwrap();
// The operations are merged into one operation
assert_eq!(transaction_a.operations.len(), 1);
@ -46,14 +46,14 @@ fn transaction_compose_multiple_update_test() {
let inverted = Transaction::from_operations(other_transaction.operations.inverted());
// the update operation will be merged into insert operation
let _ = transaction.compose(other_transaction).unwrap();
transaction.compose(other_transaction).unwrap();
assert_eq!(transaction.operations.len(), 1);
assert_eq!(
transaction.to_json().unwrap(),
r#"{"operations":[{"op":"insert","path":[0],"nodes":[{"type":"text","body":{"delta":[{"insert":"Hello world😁"}]}}]}]}"#
);
let _ = transaction.compose(inverted).unwrap();
transaction.compose(inverted).unwrap();
assert_eq!(
transaction.to_json().unwrap(),
r#"{"operations":[{"op":"insert","path":[0],"nodes":[{"type":"text","body":{"delta":[{"insert":"Hello"}]}}]}]}"#

View File

@ -232,8 +232,7 @@ pub struct WSSender(MsgSender);
impl WSSender {
pub fn send_msg<T: Into<WebSocketRawMessage>>(&self, msg: T) -> Result<(), WSError> {
let msg = msg.into();
let _ = self
.0
self.0
.unbounded_send(msg.into())
.map_err(|e| WSError::internal().context(e))?;
Ok(())
@ -261,7 +260,7 @@ impl WSSender {
reason: reason.to_owned().into(),
};
let msg = Message::Close(Some(frame));
let _ = self.0.unbounded_send(msg).map_err(|e| WSError::internal().context(e))?;
self.0.unbounded_send(msg).map_err(|e| WSError::internal().context(e))?;
Ok(())
}
}