Feat: add appflowy editor in backend (#1320)

* chore: remove update attributes

* chore: format code

* chore: extension for transaction

* refactor: add document editor trait

* chore: add appflowy_document editor

* chore: add document serde

* chore: add new document editor

* chore: add tests

* chore: add more test

* chore: add test

Co-authored-by: nathan <nathan@appflowy.io>
Author: Nathan.fooo
Date: 2022-10-20 11:35:11 +08:00
Committed by: GitHub
Parent: 833a6cd95f
Commit: f1a5726fcb
81 changed files with 2367 additions and 902 deletions

View File

@ -1154,6 +1154,7 @@ dependencies = [
"claim 0.4.0",
"claim 0.5.0",
"fake",
"flowy-document",
"flowy-folder",
"flowy-net",
"flowy-sdk",

View File

@ -23,7 +23,7 @@ pub extern "C" fn init_sdk(path: *mut c_char) -> i64 {
let path: &str = c_str.to_str().unwrap();
let server_config = get_client_server_configuration().unwrap();
let config = FlowySDKConfig::new(path, server_config, "appflowy").log_filter("debug");
let config = FlowySDKConfig::new(path, "appflowy", server_config, false).log_filter("debug");
FLOWY_SDK.get_or_init(|| FlowySDK::new(config));
0

View File

@ -0,0 +1,99 @@
use bytes::Bytes;
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{RevisionObjectDeserializer, RevisionObjectSerializer};
use flowy_sync::entities::revision::Revision;
use lib_ot::core::{Body, Extension, Interval, NodeDataBuilder, NodeOperation, NodeTree, NodeTreeContext, Transaction};
use lib_ot::text_delta::TextOperationBuilder;
#[derive(Debug)]
pub struct Document {
tree: NodeTree,
}
impl Document {
pub fn new(tree: NodeTree) -> Self {
Self { tree }
}
pub fn from_transaction(transaction: Transaction) -> FlowyResult<Self> {
let tree = NodeTree::from_operations(transaction.operations, make_tree_context())?;
Ok(Self { tree })
}
pub fn get_content(&self, pretty: bool) -> FlowyResult<String> {
if pretty {
serde_json::to_string_pretty(self).map_err(|err| FlowyError::serde().context(err))
} else {
serde_json::to_string(self).map_err(|err| FlowyError::serde().context(err))
}
}
pub fn get_tree(&self) -> &NodeTree {
&self.tree
}
}
pub(crate) fn make_tree_context() -> NodeTreeContext {
NodeTreeContext {}
}
pub fn initial_document_content() -> String {
let delta = TextOperationBuilder::new().insert("").build();
let node_data = NodeDataBuilder::new("text").insert_body(Body::Delta(delta)).build();
let editor_node = NodeDataBuilder::new("editor").add_node_data(node_data).build();
let node_operation = NodeOperation::Insert {
path: vec![0].into(),
nodes: vec![editor_node],
};
let extension = Extension::TextSelection {
before_selection: Interval::default(),
after_selection: Interval::default(),
};
let transaction = Transaction {
operations: vec![node_operation].into(),
extension,
};
transaction.to_json().unwrap()
}
impl std::ops::Deref for Document {
type Target = NodeTree;
fn deref(&self) -> &Self::Target {
&self.tree
}
}
impl std::ops::DerefMut for Document {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.tree
}
}
pub struct DocumentRevisionSerde();
impl RevisionObjectDeserializer for DocumentRevisionSerde {
type Output = Document;
fn deserialize_revisions(_object_id: &str, revisions: Vec<Revision>) -> FlowyResult<Self::Output> {
let mut tree = NodeTree::new(make_tree_context());
let transaction = make_transaction_from_revisions(revisions)?;
let _ = tree.apply_transaction(transaction)?;
let document = Document::new(tree);
Result::<Document, FlowyError>::Ok(document)
}
}
impl RevisionObjectSerializer for DocumentRevisionSerde {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let transaction = make_transaction_from_revisions(revisions)?;
Ok(Bytes::from(transaction.to_bytes()?))
}
}
fn make_transaction_from_revisions(revisions: Vec<Revision>) -> FlowyResult<Transaction> {
let mut transaction = Transaction::new();
for revision in revisions {
let _ = transaction.compose(Transaction::from_bytes(&revision.bytes)?)?;
}
Ok(transaction)
}
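
As a quick orientation, here is a minimal sketch (assuming it runs inside this crate, where `Document`, `FlowyResult`, and the `lib_ot` builders are in scope) of how a document is assembled from the same node operations that `initial_document_content()` produces, and then rendered to JSON:

```rust
use flowy_error::FlowyResult;
use lib_ot::core::{Body, NodeDataBuilder, NodeOperation, Transaction};
use lib_ot::text_delta::TextOperationBuilder;

fn build_empty_document_json() -> FlowyResult<String> {
    // One "editor" root node with a single empty "text" child, mirroring
    // initial_document_content() above.
    let delta = TextOperationBuilder::new().insert("").build();
    let text_node = NodeDataBuilder::new("text").insert_body(Body::Delta(delta)).build();
    let editor_node = NodeDataBuilder::new("editor").add_node_data(text_node).build();
    let operation = NodeOperation::Insert {
        path: vec![0].into(),
        nodes: vec![editor_node],
    };

    // Document::from_transaction builds the NodeTree with make_tree_context().
    let document = Document::from_transaction(Transaction::from_operations(vec![operation]))?;

    // false => compact JSON; the new_document tests below expect
    // {"document":{"type":"editor","children":[{"type":"text"}]}}
    document.get_content(false)
}
```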

View File

@ -0,0 +1,247 @@
use crate::editor::document::Document;
use lib_ot::core::{AttributeHashMap, Body, NodeData, NodeId, NodeTree};
use lib_ot::text_delta::TextOperations;
use serde::de::{self, MapAccess, Visitor};
use serde::ser::{SerializeMap, SerializeSeq};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
impl Serialize for Document {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map = serializer.serialize_map(Some(1))?;
let _ = map.serialize_key("document")?;
let _ = map.serialize_value(&DocumentContentSerializer(self))?;
map.end()
}
}
const FIELDS: &[&str] = &["Document"];
impl<'de> Deserialize<'de> for Document {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct DocumentVisitor();
impl<'de> Visitor<'de> for DocumentVisitor {
type Value = Document;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Expect document tree")
}
fn visit_map<M>(self, mut map: M) -> Result<Document, M::Error>
where
M: MapAccess<'de>,
{
let mut node_tree = None;
while let Some(key) = map.next_key()? {
match key {
"document" => {
if node_tree.is_some() {
return Err(de::Error::duplicate_field("document"));
}
node_tree = Some(map.next_value::<NodeTree>()?)
}
s => {
return Err(de::Error::unknown_field(s, FIELDS));
}
}
}
match node_tree {
Some(tree) => Ok(Document::new(tree)),
None => Err(de::Error::missing_field("document")),
}
}
}
deserializer.deserialize_any(DocumentVisitor())
}
}
#[derive(Debug)]
struct DocumentContentSerializer<'a>(pub &'a Document);
#[derive(Default, Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
pub struct DocumentNodeData {
#[serde(rename = "type")]
pub node_type: String,
#[serde(skip_serializing_if = "AttributeHashMap::is_empty")]
#[serde(default)]
pub attributes: AttributeHashMap,
#[serde(skip_serializing_if = "TextOperations::is_empty")]
pub delta: TextOperations,
#[serde(skip_serializing_if = "Vec::is_empty")]
#[serde(default)]
pub children: Vec<DocumentNodeData>,
}
impl std::convert::From<NodeData> for DocumentNodeData {
fn from(node_data: NodeData) -> Self {
let delta = if let Body::Delta(operations) = node_data.body {
operations
} else {
TextOperations::default()
};
DocumentNodeData {
node_type: node_data.node_type,
attributes: node_data.attributes,
delta,
children: node_data.children.into_iter().map(DocumentNodeData::from).collect(),
}
}
}
impl<'a> Serialize for DocumentContentSerializer<'a> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let tree = self.0.get_tree();
let root_node_id = tree.root_node_id();
// transform the NodeData to DocumentNodeData
let get_document_node_data = |node_id: NodeId| tree.get_node_data(node_id).map(DocumentNodeData::from);
let mut children = tree.get_children_ids(root_node_id);
if children.len() == 1 {
let node_id = children.pop().unwrap();
match get_document_node_data(node_id) {
None => serializer.serialize_str(""),
Some(node_data) => node_data.serialize(serializer),
}
} else {
let mut seq = serializer.serialize_seq(Some(children.len()))?;
for child in children {
if let Some(node_data) = get_document_node_data(child) {
let _ = seq.serialize_element(&node_data)?;
}
}
seq.end()
}
}
}
#[cfg(test)]
mod tests {
use crate::editor::document::Document;
#[test]
fn document_serde_test() {
let document: Document = serde_json::from_str(EXAMPLE_DOCUMENT).unwrap();
let _ = serde_json::to_string_pretty(&document).unwrap();
}
const EXAMPLE_DOCUMENT: &str = r#"{
"document": {
"type": "editor",
"children": [
{
"type": "image",
"attributes": {
"image_src": "https://s1.ax1x.com/2022/08/26/v2sSbR.jpg",
"align": "center"
}
},
{
"type": "text",
"attributes": { "subtype": "heading", "heading": "h1" },
"delta": [
{ "insert": "👋 " },
{ "insert": "Welcome to ", "attributes": { "bold": true } },
{
"insert": "AppFlowy Editor",
"attributes": {
"href": "appflowy.io",
"italic": true,
"bold": true
}
}
]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"delta": [
{ "insert": "AppFlowy Editor is a " },
{ "insert": "highly customizable", "attributes": { "bold": true } },
{ "insert": " " },
{ "insert": "rich-text editor", "attributes": { "italic": true } },
{ "insert": " for " },
{ "insert": "Flutter", "attributes": { "underline": true } }
]
},
{
"type": "text",
"attributes": { "checkbox": true, "subtype": "checkbox" },
"delta": [{ "insert": "Customizable" }]
},
{
"type": "text",
"attributes": { "checkbox": true, "subtype": "checkbox" },
"delta": [{ "insert": "Test-covered" }]
},
{
"type": "text",
"attributes": { "checkbox": false, "subtype": "checkbox" },
"delta": [{ "insert": "more to come!" }]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"attributes": { "subtype": "quote" },
"delta": [{ "insert": "Here is an example you can give a try" }]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"delta": [
{ "insert": "You can also use " },
{
"insert": "AppFlowy Editor",
"attributes": {
"italic": true,
"bold": true,
"backgroundColor": "0x6000BCF0"
}
},
{ "insert": " as a component to build your own app." }
]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"attributes": { "subtype": "bulleted-list" },
"delta": [{ "insert": "Use / to insert blocks" }]
},
{
"type": "text",
"attributes": { "subtype": "bulleted-list" },
"delta": [
{
"insert": "Select text to trigger to the toolbar to format your notes."
}
]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"delta": [
{
"insert": "If you have questions or feedback, please submit an issue on Github or join the community along with 1000+ builders!"
}
]
}
]
}
}
"#;
}
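
For reference, a small sketch (not part of the diff, assuming it lives next to this module so the private `DocumentNodeData` is visible) of the intermediate step: a `NodeData` built with `NodeDataBuilder` is converted to `DocumentNodeData` before serde runs, so `Body::Delta` ends up under "delta", `node_type` under "type", and empty attributes/children are skipped:

```rust
use lib_ot::core::{Body, NodeDataBuilder};
use lib_ot::text_delta::TextOperationBuilder;

fn node_to_json() -> String {
    let delta = TextOperationBuilder::new().insert("hello").build();
    let node = NodeDataBuilder::new("text").insert_body(Body::Delta(delta)).build();

    // From<NodeData> moves the delta out of the Body; empty attributes and
    // children are dropped during serialization.
    let node_data = DocumentNodeData::from(node);

    // => {"type":"text","delta":[{"insert":"hello"}]}
    serde_json::to_string(&node_data).unwrap()
}
```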

View File

@ -0,0 +1,92 @@
use crate::editor::document::{Document, DocumentRevisionSerde};
use crate::editor::queue::{Command, CommandSender, DocumentQueue};
use crate::{DocumentEditor, DocumentUser};
use bytes::Bytes;
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_revision::{RevisionCloudService, RevisionManager};
use flowy_sync::entities::ws_data::ServerRevisionWSData;
use lib_infra::future::FutureResult;
use lib_ot::core::Transaction;
use lib_ws::WSConnectState;
use std::any::Any;
use std::sync::Arc;
use tokio::sync::{mpsc, oneshot};
pub struct AppFlowyDocumentEditor {
#[allow(dead_code)]
doc_id: String,
command_sender: CommandSender,
}
impl AppFlowyDocumentEditor {
pub async fn new(
doc_id: &str,
user: Arc<dyn DocumentUser>,
mut rev_manager: RevisionManager,
cloud_service: Arc<dyn RevisionCloudService>,
) -> FlowyResult<Arc<Self>> {
let document = rev_manager.load::<DocumentRevisionSerde>(Some(cloud_service)).await?;
let rev_manager = Arc::new(rev_manager);
let command_sender = spawn_edit_queue(user, rev_manager, document);
let doc_id = doc_id.to_string();
let editor = Arc::new(Self { doc_id, command_sender });
Ok(editor)
}
pub async fn apply_transaction(&self, transaction: Transaction) -> FlowyResult<()> {
let (ret, rx) = oneshot::channel::<FlowyResult<()>>();
let _ = self
.command_sender
.send(Command::ComposeTransaction { transaction, ret })
.await;
let _ = rx.await.map_err(internal_error)??;
Ok(())
}
pub async fn get_content(&self, pretty: bool) -> FlowyResult<String> {
let (ret, rx) = oneshot::channel::<FlowyResult<String>>();
let _ = self
.command_sender
.send(Command::GetDocumentContent { pretty, ret })
.await;
let content = rx.await.map_err(internal_error)??;
Ok(content)
}
}
fn spawn_edit_queue(
user: Arc<dyn DocumentUser>,
rev_manager: Arc<RevisionManager>,
document: Document,
) -> CommandSender {
let (sender, receiver) = mpsc::channel(1000);
let queue = DocumentQueue::new(user, rev_manager, document, receiver);
tokio::spawn(queue.run());
sender
}
impl DocumentEditor for Arc<AppFlowyDocumentEditor> {
fn get_operations_str(&self) -> FutureResult<String, FlowyError> {
todo!()
}
fn compose_local_operations(&self, _data: Bytes) -> FutureResult<(), FlowyError> {
todo!()
}
fn close(&self) {
todo!()
}
fn receive_ws_data(&self, _data: ServerRevisionWSData) -> FutureResult<(), FlowyError> {
todo!()
}
fn receive_ws_state(&self, _state: &WSConnectState) {
todo!()
}
fn as_any(&self) -> &dyn Any {
self
}
}
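
A hedged usage sketch of the new editor (assuming an `AppFlowyDocumentEditor` has already been opened, e.g. via `DocumentManager::open_document_editor` plus the `as_any()` downcast used in the tests, and that the `lib_ot` builders are in scope): insert a text node, then read the document back as pretty JSON.

```rust
use flowy_error::FlowyResult;
use lib_ot::core::{Body, NodeDataBuilder, NodeOperation, Transaction};
use lib_ot::text_delta::TextOperationBuilder;

async fn insert_hello(editor: &AppFlowyDocumentEditor) -> FlowyResult<String> {
    let delta = TextOperationBuilder::new().insert("Hello world").build();
    let node = NodeDataBuilder::new("text").insert_body(Body::Delta(delta)).build();
    let operation = NodeOperation::Insert {
        // first child of the root "editor" node
        path: vec![0, 0].into(),
        nodes: vec![node],
    };

    // The transaction is queued and composed by the DocumentQueue (see queue.rs).
    editor
        .apply_transaction(Transaction::from_operations(vec![operation]))
        .await?;
    editor.get_content(true).await
}
```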

View File

@ -0,0 +1,8 @@
#![allow(clippy::module_inception)]
mod document;
mod document_serde;
mod editor;
mod queue;
pub use document::*;
pub use editor::*;

View File

@ -0,0 +1,78 @@
use crate::editor::document::Document;
use crate::DocumentUser;
use async_stream::stream;
use flowy_error::FlowyError;
use flowy_revision::RevisionManager;
use futures::stream::StreamExt;
use lib_ot::core::Transaction;
use std::sync::Arc;
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::sync::{oneshot, RwLock};
pub struct DocumentQueue {
#[allow(dead_code)]
user: Arc<dyn DocumentUser>,
document: Arc<RwLock<Document>>,
#[allow(dead_code)]
rev_manager: Arc<RevisionManager>,
receiver: Option<CommandReceiver>,
}
impl DocumentQueue {
pub fn new(
user: Arc<dyn DocumentUser>,
rev_manager: Arc<RevisionManager>,
document: Document,
receiver: CommandReceiver,
) -> Self {
let document = Arc::new(RwLock::new(document));
Self {
user,
document,
rev_manager,
receiver: Some(receiver),
}
}
pub async fn run(mut self) {
let mut receiver = self.receiver.take().expect("Only take once");
let stream = stream! {
loop {
match receiver.recv().await {
Some(msg) => yield msg,
None => break,
}
}
};
stream
.for_each(|command| async {
match self.handle_command(command).await {
Ok(_) => {}
Err(e) => tracing::debug!("[DocumentQueue]: {}", e),
}
})
.await;
}
async fn handle_command(&self, command: Command) -> Result<(), FlowyError> {
match command {
Command::ComposeTransaction { transaction, ret } => {
self.document.write().await.apply_transaction(transaction)?;
let _ = ret.send(Ok(()));
}
Command::GetDocumentContent { pretty, ret } => {
let content = self.document.read().await.get_content(pretty)?;
let _ = ret.send(Ok(content));
}
}
Ok(())
}
}
pub(crate) type CommandSender = Sender<Command>;
pub(crate) type CommandReceiver = Receiver<Command>;
pub(crate) type Ret<T> = oneshot::Sender<Result<T, FlowyError>>;
pub enum Command {
ComposeTransaction { transaction: Transaction, ret: Ret<()> },
GetDocumentContent { pretty: bool, ret: Ret<String> },
}
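
The queue is effectively a small actor: `AppFlowyDocumentEditor` never touches the `Document` directly, it only sends a `Command` carrying a oneshot `Ret` channel and awaits the reply. A minimal caller-side sketch of that handshake (assuming a `sender: CommandSender` wired to a running `DocumentQueue`, used inside this crate where `Command` is visible):

```rust
use flowy_error::{internal_error, FlowyResult};
use tokio::sync::oneshot;

async fn read_content(sender: &CommandSender, pretty: bool) -> FlowyResult<String> {
    // The oneshot sender travels with the command; the queue answers on it.
    let (ret, rx) = oneshot::channel();
    let _ = sender.send(Command::GetDocumentContent { pretty, ret }).await;

    // Outer error: the queue dropped the channel; inner error: the read failed.
    rx.await.map_err(internal_error)?
}
```

Because every mutation funnels through the single spawned task, transactions are applied strictly in the order they were sent, and callers never lock the `Document` themselves.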

View File

@ -9,13 +9,13 @@ pub enum ExportType {
Link = 2,
}
impl std::default::Default for ExportType {
impl Default for ExportType {
fn default() -> Self {
ExportType::Text
}
}
impl std::convert::From<i32> for ExportType {
impl From<i32> for ExportType {
fn from(val: i32) -> Self {
match val {
0 => ExportType::Text,
@ -37,10 +37,6 @@ pub struct EditPayloadPB {
// Encode in JSON format
#[pb(index = 2)]
pub operations: String,
// Encode in JSON format
#[pb(index = 3)]
pub operations_str: String,
}
#[derive(Default)]
@ -49,9 +45,6 @@ pub struct EditParams {
// Encode in JSON format
pub operations: String,
// Encode in JSON format
pub operations_str: String,
}
impl TryInto<EditParams> for EditPayloadPB {
@ -60,7 +53,6 @@ impl TryInto<EditParams> for EditPayloadPB {
Ok(EditParams {
doc_id: self.doc_id,
operations: self.operations,
operations_str: self.operations_str,
})
}
}

View File

@ -12,7 +12,7 @@ pub(crate) async fn get_document_handler(
) -> DataResult<DocumentSnapshotPB, FlowyError> {
let document_id: DocumentIdPB = data.into_inner();
let editor = manager.open_document_editor(&document_id).await?;
let operations_str = editor.get_operation_str().await?;
let operations_str = editor.get_operations_str().await?;
data_result(DocumentSnapshotPB {
doc_id: document_id.into(),
snapshot: operations_str,
@ -35,7 +35,7 @@ pub(crate) async fn export_handler(
) -> DataResult<ExportDataPB, FlowyError> {
let params: ExportParams = data.into_inner().try_into()?;
let editor = manager.open_document_editor(&params.view_id).await?;
let operations_str = editor.get_operation_str().await?;
let operations_str = editor.get_operations_str().await?;
data_result(ExportDataPB {
data: operations_str,
export_type: params.export_type,

View File

@ -1,12 +1,12 @@
pub mod editor;
mod entities;
mod event_handler;
pub mod event_map;
pub mod manager;
mod queue;
mod web_socket;
pub mod editor;
pub mod old_editor;
pub mod protobuf;
pub use manager::*;
pub mod errors {
pub use flowy_error::{internal_error, ErrorCode, FlowyError};

View File

@ -1,6 +1,7 @@
use crate::editor::DocumentRevisionCompactor;
use crate::editor::{initial_document_content, AppFlowyDocumentEditor};
use crate::entities::EditParams;
use crate::{editor::DocumentEditor, errors::FlowyError, DocumentCloudService};
use crate::old_editor::editor::{DocumentRevisionCompress, OldDocumentEditor};
use crate::{errors::FlowyError, DocumentCloudService};
use bytes::Bytes;
use dashmap::DashMap;
use flowy_database::ConnectionPool;
@ -9,12 +10,16 @@ use flowy_revision::disk::SQLiteDocumentRevisionPersistence;
use flowy_revision::{
RevisionCloudService, RevisionManager, RevisionPersistence, RevisionWebSocket, SQLiteRevisionSnapshotPersistence,
};
use flowy_sync::client_document::initial_old_document_content;
use flowy_sync::entities::{
document::{DocumentIdPB, DocumentOperationsPB},
revision::{md5, RepeatedRevision, Revision},
ws_data::ServerRevisionWSData,
};
use lib_infra::future::FutureResult;
use lib_ws::WSConnectState;
use std::any::Any;
use std::{convert::TryInto, sync::Arc};
pub trait DocumentUser: Send + Sync {
@ -24,11 +29,36 @@ pub trait DocumentUser: Send + Sync {
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError>;
}
pub trait DocumentEditor: Send + Sync {
fn get_operations_str(&self) -> FutureResult<String, FlowyError>;
fn compose_local_operations(&self, data: Bytes) -> FutureResult<(), FlowyError>;
fn close(&self);
fn receive_ws_data(&self, data: ServerRevisionWSData) -> FutureResult<(), FlowyError>;
fn receive_ws_state(&self, state: &WSConnectState);
/// Returns the `Any` reference that can be used to downcast back to the original,
/// concrete type.
///
/// The indirection through `as_any` is because using `downcast_ref`
/// on `Box<A>` *directly* only lets us downcast back to `&A` again. You can take a look at [this](https://stackoverflow.com/questions/33687447/how-to-get-a-reference-to-a-concrete-type-from-a-trait-object)
/// for more information.
///
///
fn as_any(&self) -> &dyn Any;
}
#[derive(Clone, Debug)]
pub struct DocumentConfig {
pub use_new_editor: bool,
}
pub struct DocumentManager {
cloud_service: Arc<dyn DocumentCloudService>,
rev_web_socket: Arc<dyn RevisionWebSocket>,
editor_map: Arc<DocumentEditorMap>,
user: Arc<dyn DocumentUser>,
config: DocumentConfig,
}
impl DocumentManager {
@ -36,12 +66,14 @@ impl DocumentManager {
cloud_service: Arc<dyn DocumentCloudService>,
document_user: Arc<dyn DocumentUser>,
rev_web_socket: Arc<dyn RevisionWebSocket>,
config: DocumentConfig,
) -> Self {
Self {
cloud_service,
rev_web_socket,
editor_map: Arc::new(DocumentEditorMap::new()),
user: document_user,
config,
}
}
@ -52,10 +84,13 @@ impl DocumentManager {
}
#[tracing::instrument(level = "trace", skip(self, editor_id), fields(editor_id), err)]
pub async fn open_document_editor<T: AsRef<str>>(&self, editor_id: T) -> Result<Arc<DocumentEditor>, FlowyError> {
pub async fn open_document_editor<T: AsRef<str>>(
&self,
editor_id: T,
) -> Result<Arc<dyn DocumentEditor>, FlowyError> {
let editor_id = editor_id.as_ref();
tracing::Span::current().record("editor_id", &editor_id);
self.get_document_editor(editor_id).await
self.init_document_editor(editor_id).await
}
#[tracing::instrument(level = "trace", skip(self, editor_id), fields(editor_id), err)]
@ -75,7 +110,7 @@ impl DocumentManager {
let _ = editor
.compose_local_operations(Bytes::from(payload.operations_str))
.await?;
let operations_str = editor.get_operation_str().await?;
let operations_str = editor.get_operations_str().await?;
Ok(DocumentOperationsPB {
doc_id: payload.doc_id.clone(),
operations_str,
@ -84,9 +119,7 @@ impl DocumentManager {
pub async fn apply_edit(&self, params: EditParams) -> FlowyResult<()> {
let editor = self.get_document_editor(&params.doc_id).await?;
let _ = editor
.compose_local_operations(Bytes::from(params.operations_str))
.await?;
let _ = editor.compose_local_operations(Bytes::from(params.operations)).await?;
Ok(())
}
@ -114,12 +147,18 @@ impl DocumentManager {
}
}
}
pub fn initial_document_content(&self) -> String {
if self.config.use_new_editor {
initial_document_content()
} else {
initial_old_document_content()
}
}
}
impl DocumentManager {
/// Returns the `DocumentEditor`
/// Initializes the document editor if it's not initialized yet. Otherwise, returns the opened
/// editor.
///
/// # Arguments
///
@ -127,12 +166,9 @@ impl DocumentManager {
///
/// returns: Result<Arc<DocumentEditor>, FlowyError>
///
async fn get_document_editor(&self, doc_id: &str) -> FlowyResult<Arc<DocumentEditor>> {
async fn get_document_editor(&self, doc_id: &str) -> FlowyResult<Arc<dyn DocumentEditor>> {
match self.editor_map.get(doc_id) {
None => {
let db_pool = self.user.db_pool()?;
self.init_document_editor(doc_id, db_pool).await
}
None => self.init_document_editor(doc_id).await,
Some(editor) => Ok(editor),
}
}
@ -146,12 +182,9 @@ impl DocumentManager {
///
/// returns: Result<Arc<DocumentEditor>, FlowyError>
///
#[tracing::instrument(level = "trace", skip(self, pool), err)]
async fn init_document_editor(
&self,
doc_id: &str,
pool: Arc<ConnectionPool>,
) -> Result<Arc<DocumentEditor>, FlowyError> {
#[tracing::instrument(level = "trace", skip(self), err)]
pub async fn init_document_editor(&self, doc_id: &str) -> Result<Arc<dyn DocumentEditor>, FlowyError> {
let pool = self.user.db_pool()?;
let user = self.user.clone();
let token = self.user.token()?;
let rev_manager = self.make_document_rev_manager(doc_id, pool.clone())?;
@ -159,8 +192,16 @@ impl DocumentManager {
token,
server: self.cloud_service.clone(),
});
let editor = DocumentEditor::new(doc_id, user, rev_manager, self.rev_web_socket.clone(), cloud_service).await?;
self.editor_map.insert(doc_id, &editor);
let editor: Arc<dyn DocumentEditor> = if self.config.use_new_editor {
let editor = AppFlowyDocumentEditor::new(doc_id, user, rev_manager, cloud_service).await?;
Arc::new(editor)
} else {
let editor =
OldDocumentEditor::new(doc_id, user, rev_manager, self.rev_web_socket.clone(), cloud_service).await?;
Arc::new(editor)
};
self.editor_map.insert(doc_id, editor.clone());
Ok(editor)
}
@ -174,7 +215,7 @@ impl DocumentManager {
let rev_persistence = RevisionPersistence::new(&user_id, doc_id, disk_cache);
// let history_persistence = SQLiteRevisionHistoryPersistence::new(doc_id, pool.clone());
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(doc_id, pool);
let rev_compactor = DocumentRevisionCompactor();
let rev_compactor = DocumentRevisionCompress();
Ok(RevisionManager::new(
&user_id,
@ -222,7 +263,7 @@ impl RevisionCloudService for DocumentRevisionCloudService {
}
pub struct DocumentEditorMap {
inner: DashMap<String, Arc<DocumentEditor>>,
inner: DashMap<String, Arc<dyn DocumentEditor>>,
}
impl DocumentEditorMap {
@ -230,20 +271,20 @@ impl DocumentEditorMap {
Self { inner: DashMap::new() }
}
pub(crate) fn insert(&self, editor_id: &str, doc: &Arc<DocumentEditor>) {
pub(crate) fn insert(&self, editor_id: &str, editor: Arc<dyn DocumentEditor>) {
if self.inner.contains_key(editor_id) {
log::warn!("Doc:{} already exists in cache", editor_id);
log::warn!("Editor:{} already exists in cache", editor_id);
}
self.inner.insert(editor_id.to_string(), doc.clone());
self.inner.insert(editor_id.to_string(), editor);
}
pub(crate) fn get(&self, editor_id: &str) -> Option<Arc<DocumentEditor>> {
pub(crate) fn get(&self, editor_id: &str) -> Option<Arc<dyn DocumentEditor>> {
Some(self.inner.get(editor_id)?.clone())
}
pub(crate) fn remove(&self, editor_id: &str) {
if let Some(editor) = self.get(editor_id) {
editor.stop()
editor.close()
}
self.inner.remove(editor_id);
}
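
Since `open_document_editor` now returns an `Arc<dyn DocumentEditor>`, callers that need the new editor's transaction API have to downcast through `as_any()`, the same way the new_document test script does. A hedged sketch from outside the crate (assuming the `flowy_document` re-exports shown in lib.rs):

```rust
use std::sync::Arc;

use flowy_document::editor::AppFlowyDocumentEditor;
use flowy_document::{DocumentEditor, DocumentManager};
use flowy_error::FlowyError;

async fn open_new_editor(
    manager: &DocumentManager,
    doc_id: &str,
) -> Result<Arc<AppFlowyDocumentEditor>, FlowyError> {
    let editor = manager.open_document_editor(doc_id).await?;
    // The trait is implemented on Arc<AppFlowyDocumentEditor>, so that is the
    // concrete type as_any() hands back. This only succeeds when the manager
    // was built with DocumentConfig { use_new_editor: true }.
    let editor = editor
        .as_any()
        .downcast_ref::<Arc<AppFlowyDocumentEditor>>()
        .cloned()
        .expect("expected the new AppFlowyDocumentEditor");
    Ok(editor)
}
```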

View File

@ -0,0 +1 @@

View File

@ -1,9 +1,7 @@
use crate::web_socket::EditorCommandSender;
use crate::{
errors::FlowyError,
queue::{EditDocumentQueue, EditorCommand},
DocumentUser,
};
#![allow(unused_attributes)]
#![allow(unused_attributes)]
use crate::old_editor::queue::{EditDocumentQueue, EditorCommand, EditorCommandSender};
use crate::{errors::FlowyError, DocumentEditor, DocumentUser};
use bytes::Bytes;
use flowy_error::{internal_error, FlowyResult};
use flowy_revision::{
@ -16,16 +14,18 @@ use flowy_sync::{
errors::CollaborateResult,
util::make_operations_from_revisions,
};
use lib_infra::future::FutureResult;
use lib_ot::core::{AttributeEntry, AttributeHashMap};
use lib_ot::{
core::{DeltaOperation, Interval},
text_delta::TextOperations,
};
use lib_ws::WSConnectState;
use std::any::Any;
use std::sync::Arc;
use tokio::sync::{mpsc, oneshot};
pub struct DocumentEditor {
pub struct OldDocumentEditor {
pub doc_id: String,
#[allow(dead_code)]
rev_manager: Arc<RevisionManager>,
@ -34,7 +34,7 @@ pub struct DocumentEditor {
edit_cmd_tx: EditorCommandSender,
}
impl DocumentEditor {
impl OldDocumentEditor {
#[allow(unused_variables)]
pub(crate) async fn new(
doc_id: &str,
@ -43,15 +43,17 @@ impl DocumentEditor {
rev_web_socket: Arc<dyn RevisionWebSocket>,
cloud_service: Arc<dyn RevisionCloudService>,
) -> FlowyResult<Arc<Self>> {
let document_info = rev_manager.load::<DocumentRevisionSerde>(Some(cloud_service)).await?;
let operations = TextOperations::from_bytes(&document_info.content)?;
let document = rev_manager
.load::<DeltaDocumentRevisionSerde>(Some(cloud_service))
.await?;
let operations = TextOperations::from_bytes(&document.content)?;
let rev_manager = Arc::new(rev_manager);
let doc_id = doc_id.to_string();
let user_id = user.user_id()?;
let edit_cmd_tx = spawn_edit_queue(user, rev_manager.clone(), operations);
#[cfg(feature = "sync")]
let ws_manager = crate::web_socket::make_document_ws_manager(
let ws_manager = crate::old_editor::web_socket::make_document_ws_manager(
doc_id.clone(),
user_id.clone(),
edit_cmd_tx.clone(),
@ -142,51 +144,60 @@ impl DocumentEditor {
let _ = rx.await.map_err(internal_error)??;
Ok(())
}
}
pub async fn get_operation_str(&self) -> FlowyResult<String> {
impl DocumentEditor for Arc<OldDocumentEditor> {
fn get_operations_str(&self) -> FutureResult<String, FlowyError> {
let (ret, rx) = oneshot::channel::<CollaborateResult<String>>();
let msg = EditorCommand::StringifyOperations { ret };
let _ = self.edit_cmd_tx.send(msg).await;
let json = rx.await.map_err(internal_error)??;
Ok(json)
let msg = EditorCommand::GetOperationsString { ret };
let edit_cmd_tx = self.edit_cmd_tx.clone();
FutureResult::new(async move {
let _ = edit_cmd_tx.send(msg).await;
let json = rx.await.map_err(internal_error)??;
Ok(json)
})
}
#[tracing::instrument(level = "trace", skip(self, data), err)]
pub(crate) async fn compose_local_operations(&self, data: Bytes) -> Result<(), FlowyError> {
let operations = TextOperations::from_bytes(&data)?;
let (ret, rx) = oneshot::channel::<CollaborateResult<()>>();
let msg = EditorCommand::ComposeLocalOperations { operations, ret };
let _ = self.edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
Ok(())
fn compose_local_operations(&self, data: Bytes) -> FutureResult<(), FlowyError> {
let edit_cmd_tx = self.edit_cmd_tx.clone();
FutureResult::new(async move {
let operations = TextOperations::from_bytes(&data)?;
let (ret, rx) = oneshot::channel::<CollaborateResult<()>>();
let msg = EditorCommand::ComposeLocalOperations { operations, ret };
let _ = edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
Ok(())
})
}
#[cfg(feature = "sync")]
pub fn stop(&self) {
fn close(&self) {
#[cfg(feature = "sync")]
self.ws_manager.stop();
}
#[cfg(not(feature = "sync"))]
pub fn stop(&self) {}
#[allow(unused_variables)]
fn receive_ws_data(&self, data: ServerRevisionWSData) -> FutureResult<(), FlowyError> {
let cloned_self = self.clone();
FutureResult::new(async move {
#[cfg(feature = "sync")]
let _ = cloned_self.ws_manager.receive_ws_data(data).await?;
#[cfg(feature = "sync")]
pub(crate) async fn receive_ws_data(&self, data: ServerRevisionWSData) -> Result<(), FlowyError> {
self.ws_manager.receive_ws_data(data).await
}
#[cfg(not(feature = "sync"))]
pub(crate) async fn receive_ws_data(&self, _data: ServerRevisionWSData) -> Result<(), FlowyError> {
Ok(())
Ok(())
})
}
#[cfg(feature = "sync")]
pub(crate) fn receive_ws_state(&self, state: &WSConnectState) {
#[allow(unused_variables)]
fn receive_ws_state(&self, state: &WSConnectState) {
#[cfg(feature = "sync")]
self.ws_manager.connect_state_changed(state.clone());
}
#[cfg(not(feature = "sync"))]
pub(crate) fn receive_ws_state(&self, _state: &WSConnectState) {}
}
impl std::ops::Drop for DocumentEditor {
fn as_any(&self) -> &dyn Any {
self
}
}
impl std::ops::Drop for OldDocumentEditor {
fn drop(&mut self) {
tracing::trace!("{} DocumentEditor was dropped", self.doc_id)
}
@ -214,10 +225,10 @@ fn spawn_edit_queue(
}
#[cfg(feature = "flowy_unit_test")]
impl DocumentEditor {
impl OldDocumentEditor {
pub async fn document_operations(&self) -> FlowyResult<TextOperations> {
let (ret, rx) = oneshot::channel::<CollaborateResult<TextOperations>>();
let msg = EditorCommand::ReadOperations { ret };
let msg = EditorCommand::GetOperations { ret };
let _ = self.edit_cmd_tx.send(msg).await;
let delta = rx.await.map_err(internal_error)??;
Ok(delta)
@ -228,8 +239,8 @@ impl DocumentEditor {
}
}
pub struct DocumentRevisionSerde();
impl RevisionObjectDeserializer for DocumentRevisionSerde {
pub struct DeltaDocumentRevisionSerde();
impl RevisionObjectDeserializer for DeltaDocumentRevisionSerde {
type Output = DocumentPayloadPB;
fn deserialize_revisions(object_id: &str, revisions: Vec<Revision>) -> FlowyResult<Self::Output> {
@ -246,17 +257,17 @@ impl RevisionObjectDeserializer for DocumentRevisionSerde {
}
}
impl RevisionObjectSerializer for DocumentRevisionSerde {
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
impl RevisionObjectSerializer for DeltaDocumentRevisionSerde {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<AttributeHashMap>(revisions)?;
Ok(operations.json_bytes())
}
}
pub(crate) struct DocumentRevisionCompactor();
impl RevisionCompress for DocumentRevisionCompactor {
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
DocumentRevisionSerde::serialize_revisions(revisions)
pub(crate) struct DocumentRevisionCompress();
impl RevisionCompress for DocumentRevisionCompress {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
DeltaDocumentRevisionSerde::combine_revisions(revisions)
}
}
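
With both editors behind the same trait, handlers no longer care which one the manager produced; they simply await the `FutureResult`. A minimal call-site sketch (assuming the `flowy_document` re-exports):

```rust
use std::sync::Arc;

use flowy_document::DocumentEditor;
use flowy_error::FlowyError;

// Mirrors what get_document_handler/export_handler do: await the FutureResult
// returned by the trait method. Note that AppFlowyDocumentEditor still stubs
// this method with todo!() in this commit, so only the old editor exercises it.
async fn snapshot_json(editor: Arc<dyn DocumentEditor>) -> Result<String, FlowyError> {
    editor.get_operations_str().await
}
```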

View File

@ -0,0 +1,4 @@
pub mod conflict;
pub mod editor;
pub mod queue;
mod web_socket;

View File

@ -1,4 +1,4 @@
use crate::web_socket::{DocumentResolveOperations, EditorCommandReceiver};
use crate::old_editor::web_socket::DocumentResolveOperations;
use crate::DocumentUser;
use async_stream::stream;
use flowy_error::FlowyError;
@ -15,6 +15,7 @@ use lib_ot::{
text_delta::TextOperations,
};
use std::sync::Arc;
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::sync::{oneshot, RwLock};
// The EditorCommandQueue executes each command that will alter the document in
@ -161,11 +162,11 @@ impl EditDocumentQueue {
let _ = self.save_local_operations(operations, md5).await?;
let _ = ret.send(Ok(()));
}
EditorCommand::StringifyOperations { ret } => {
EditorCommand::GetOperationsString { ret } => {
let data = self.document.read().await.get_operations_json();
let _ = ret.send(Ok(data));
}
EditorCommand::ReadOperations { ret } => {
EditorCommand::GetOperations { ret } => {
let operations = self.document.read().await.get_operations().clone();
let _ = ret.send(Ok(operations));
}
@ -184,7 +185,8 @@ impl EditDocumentQueue {
}
pub type TextTransformOperations = TransformOperations<DocumentResolveOperations>;
pub(crate) type EditorCommandSender = Sender<EditorCommand>;
pub(crate) type EditorCommandReceiver = Receiver<EditorCommand>;
pub(crate) type Ret<T> = oneshot::Sender<Result<T, CollaborateError>>;
pub(crate) enum EditorCommand {
@ -235,11 +237,11 @@ pub(crate) enum EditorCommand {
Redo {
ret: Ret<()>,
},
StringifyOperations {
GetOperationsString {
ret: Ret<String>,
},
#[allow(dead_code)]
ReadOperations {
GetOperations {
ret: Ret<TextOperations>,
},
}
@ -259,8 +261,8 @@ impl std::fmt::Debug for EditorCommand {
EditorCommand::CanRedo { .. } => "CanRedo",
EditorCommand::Undo { .. } => "Undo",
EditorCommand::Redo { .. } => "Redo",
EditorCommand::StringifyOperations { .. } => "StringifyOperations",
EditorCommand::ReadOperations { .. } => "ReadOperations",
EditorCommand::GetOperationsString { .. } => "StringifyOperations",
EditorCommand::GetOperations { .. } => "ReadOperations",
};
f.write_str(s)
}

View File

@ -1,9 +1,10 @@
use crate::queue::TextTransformOperations;
use crate::{queue::EditorCommand, TEXT_BLOCK_SYNC_INTERVAL_IN_MILLIS};
use crate::old_editor::queue::{EditorCommand, EditorCommandSender, TextTransformOperations};
use crate::TEXT_BLOCK_SYNC_INTERVAL_IN_MILLIS;
use bytes::Bytes;
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_revision::*;
use flowy_sync::entities::revision::Revision;
use flowy_sync::util::make_operations_from_revisions;
use flowy_sync::{
entities::{
revision::RevisionRange,
@ -12,19 +13,10 @@ use flowy_sync::{
errors::CollaborateResult,
};
use lib_infra::future::{BoxResultFuture, FutureResult};
use flowy_sync::util::make_operations_from_revisions;
use lib_ot::text_delta::TextOperations;
use lib_ws::WSConnectState;
use std::{sync::Arc, time::Duration};
use tokio::sync::{
broadcast,
mpsc::{Receiver, Sender},
oneshot,
};
pub(crate) type EditorCommandSender = Sender<EditorCommand>;
pub(crate) type EditorCommandReceiver = Receiver<EditorCommand>;
use tokio::sync::{broadcast, oneshot};
#[derive(Clone)]
pub struct DocumentResolveOperations(pub TextOperations);

View File

@ -1,2 +0,0 @@
mod script;
mod text_block_test;

View File

@ -1,6 +1,6 @@
#![cfg_attr(rustfmt, rustfmt::skip)]
use crate::editor::{TestBuilder, TestOp::*};
use flowy_sync::client_document::{NewlineDoc, EmptyDoc};
use flowy_sync::client_document::{NewlineDocument, EmptyDocument};
use lib_ot::core::{Interval, OperationTransform, NEW_LINE, WHITESPACE, OTString};
use unicode_segmentation::UnicodeSegmentation;
use lib_ot::text_delta::TextOperations;
@ -19,7 +19,7 @@ fn attributes_bold_added() {
]"#,
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -31,7 +31,7 @@ fn attributes_bold_added_and_invert_all() {
Bold(0, Interval::new(0, 3), false),
AssertDocJson(0, r#"[{"insert":"123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -43,7 +43,7 @@ fn attributes_bold_added_and_invert_partial_suffix() {
Bold(0, Interval::new(2, 4), false),
AssertDocJson(0, r#"[{"insert":"12","attributes":{"bold":true}},{"insert":"34"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -57,7 +57,7 @@ fn attributes_bold_added_and_invert_partial_suffix2() {
Bold(0, Interval::new(2, 4), true),
AssertDocJson(0, r#"[{"insert":"1234","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -85,7 +85,7 @@ fn attributes_bold_added_with_new_line() {
r#"[{"insert":"123","attributes":{"bold":true}},{"insert":"\na\n"},{"insert":"456","attributes":{"bold":true}},{"insert":"\n"}]"#,
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -97,7 +97,7 @@ fn attributes_bold_added_and_invert_partial_prefix() {
Bold(0, Interval::new(0, 2), false),
AssertDocJson(0, r#"[{"insert":"12"},{"insert":"34","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -109,7 +109,7 @@ fn attributes_bold_added_consecutive() {
Bold(0, Interval::new(1, 2), true),
AssertDocJson(0, r#"[{"insert":"12","attributes":{"bold":true}},{"insert":"34"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -128,7 +128,7 @@ fn attributes_bold_added_italic() {
r#"[{"insert":"12345678","attributes":{"bold":true,"italic":true}},{"insert":"\n"}]"#,
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -156,7 +156,7 @@ fn attributes_bold_added_italic2() {
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -193,7 +193,7 @@ fn attributes_bold_added_italic3() {
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -229,7 +229,7 @@ fn attributes_bold_added_italic_delete() {
AssertDocJson(0, r#"[{"insert":"67"},{"insert":"89","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -240,7 +240,7 @@ fn attributes_merge_inserted_text_with_same_attribute() {
InsertBold(0, "456", Interval::new(3, 6)),
AssertDocJson(0, r#"[{"insert":"123456","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -255,7 +255,7 @@ fn attributes_compose_attr_attributes_with_attr_attributes_test() {
AssertDocJson(1, r#"[{"insert":"1234567","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -296,7 +296,7 @@ fn attributes_compose_attr_attributes_with_attr_attributes_test2() {
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -312,7 +312,7 @@ fn attributes_compose_attr_attributes_with_no_attr_attributes_test() {
AssertDocJson(0, expected),
AssertDocJson(1, expected),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -324,7 +324,7 @@ fn attributes_replace_heading() {
AssertDocJson(0, r#"[{"insert":"3456","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -336,7 +336,7 @@ fn attributes_replace_trailing() {
AssertDocJson(0, r#"[{"insert":"12345","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -350,7 +350,7 @@ fn attributes_replace_middle() {
AssertDocJson(0, r#"[{"insert":"34","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -362,7 +362,7 @@ fn attributes_replace_all() {
AssertDocJson(0, r#"[]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -374,7 +374,7 @@ fn attributes_replace_with_text() {
AssertDocJson(0, r#"[{"insert":"ab"},{"insert":"456","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -390,7 +390,7 @@ fn attributes_header_insert_newline_at_middle() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -415,7 +415,7 @@ fn attributes_header_insert_double_newline_at_middle() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -430,7 +430,7 @@ fn attributes_header_insert_newline_at_trailing() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -446,7 +446,7 @@ fn attributes_header_insert_double_newline_at_trailing() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -460,7 +460,7 @@ fn attributes_link_added() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -479,7 +479,7 @@ fn attributes_link_format_with_bold() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -498,7 +498,7 @@ fn attributes_link_insert_char_at_head() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -513,7 +513,7 @@ fn attributes_link_insert_char_at_middle() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -532,7 +532,7 @@ fn attributes_link_insert_char_at_trailing() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -547,7 +547,7 @@ fn attributes_link_insert_newline_at_middle() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -563,7 +563,7 @@ fn attributes_link_auto_format() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -579,7 +579,7 @@ fn attributes_link_auto_format_exist() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -595,7 +595,7 @@ fn attributes_link_auto_format_exist2() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -606,7 +606,7 @@ fn attributes_bullet_added() {
AssertDocJson(0, r#"[{"insert":"12"},{"insert":"\n","attributes":{"list":"bullet"}}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -627,7 +627,7 @@ fn attributes_bullet_added_2() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -644,7 +644,7 @@ fn attributes_bullet_remove_partial() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -660,7 +660,7 @@ fn attributes_bullet_auto_exit() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -700,7 +700,7 @@ fn attributes_preserve_block_when_insert_newline_inside() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -717,7 +717,7 @@ fn attributes_preserve_header_format_on_merge() {
AssertDocJson(0, r#"[{"insert":"123456"},{"insert":"\n","attributes":{"header":1}}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -737,7 +737,7 @@ fn attributes_format_emoji() {
r#"[{"insert":"👋 "},{"insert":"\n","attributes":{"header":1}}]"#,
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -757,7 +757,7 @@ fn attributes_preserve_list_format_on_merge() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -796,5 +796,5 @@ fn delta_compose() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}

View File

@ -1,6 +1,6 @@
#![allow(clippy::all)]
use crate::editor::{Rng, TestBuilder, TestOp::*};
use flowy_sync::client_document::{EmptyDoc, NewlineDoc};
use flowy_sync::client_document::{EmptyDocument, NewlineDocument};
use lib_ot::text_delta::TextOperationBuilder;
use lib_ot::{core::Interval, core::*, text_delta::TextOperations};
@ -11,7 +11,7 @@ fn attributes_insert_text() {
Insert(0, "456", 3),
AssertDocJson(0, r#"[{"insert":"123456"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -21,7 +21,7 @@ fn attributes_insert_text_at_head() {
Insert(0, "456", 0),
AssertDocJson(0, r#"[{"insert":"456123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -31,7 +31,7 @@ fn attributes_insert_text_at_middle() {
Insert(0, "456", 1),
AssertDocJson(0, r#"[{"insert":"145623"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -528,7 +528,7 @@ fn transform_two_plain_delta() {
AssertDocJson(0, r#"[{"insert":"123456"}]"#),
AssertDocJson(1, r#"[{"insert":"123456"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -542,7 +542,7 @@ fn transform_two_plain_delta2() {
AssertDocJson(0, r#"[{"insert":"123456"}]"#),
AssertDocJson(1, r#"[{"insert":"123456"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -560,7 +560,7 @@ fn transform_two_non_seq_delta() {
AssertDocJson(0, r#"[{"insert":"123456"}]"#),
AssertDocJson(1, r#"[{"insert":"123456789"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -575,7 +575,7 @@ fn transform_two_conflict_non_seq_delta() {
AssertDocJson(0, r#"[{"insert":"123456"}]"#),
AssertDocJson(1, r#"[{"insert":"12378456"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -602,7 +602,7 @@ fn delta_invert_no_attribute_delta2() {
Invert(0, 1),
AssertDocJson(0, r#"[{"insert":"123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -615,7 +615,7 @@ fn delta_invert_attribute_delta_with_no_attribute_delta() {
Invert(0, 1),
AssertDocJson(0, r#"[{"insert":"123","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -650,7 +650,7 @@ fn delta_invert_attribute_delta_with_no_attribute_delta2() {
]"#,
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -663,7 +663,7 @@ fn delta_invert_no_attribute_delta_with_attribute_delta() {
Invert(0, 1),
AssertDocJson(0, r#"[{"insert":"123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -682,7 +682,7 @@ fn delta_invert_no_attribute_delta_with_attribute_delta2() {
Invert(0, 1),
AssertDocJson(0, r#"[{"insert":"123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -723,7 +723,7 @@ fn delta_invert_attribute_delta_with_attribute_delta() {
]"#,
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -733,7 +733,7 @@ fn delta_compose_str() {
Insert(0, "2", 1),
AssertDocJson(0, r#"[{"insert":"12\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -746,5 +746,5 @@ fn delta_compose_with_missing_delta() {
AssertDocJson(0, r#"[{"insert":"1234\n"}]"#),
AssertStr(1, r#"4\n"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}

View File

@ -1,4 +1,4 @@
use flowy_sync::client_document::{ClientDocument, EmptyDoc};
use flowy_sync::client_document::{ClientDocument, EmptyDocument};
use lib_ot::text_delta::TextOperation;
use lib_ot::{
core::*,
@ -101,7 +101,7 @@ fn delta_deserialize_null_test() {
#[test]
fn document_insert_serde_test() {
let mut document = ClientDocument::new::<EmptyDoc>();
let mut document = ClientDocument::new::<EmptyDocument>();
document.insert(0, "\n").unwrap();
document.insert(0, "123").unwrap();
let json = document.get_operations_json();

View File

@ -1,11 +1,11 @@
use crate::editor::{TestBuilder, TestOp::*};
use flowy_sync::client_document::{EmptyDoc, NewlineDoc, RECORD_THRESHOLD};
use flowy_sync::client_document::{EmptyDocument, NewlineDocument, RECORD_THRESHOLD};
use lib_ot::core::{Interval, NEW_LINE, WHITESPACE};
#[test]
fn history_insert_undo() {
let ops = vec![Insert(0, "123", 0), Undo(0), AssertDocJson(0, r#"[{"insert":"\n"}]"#)];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -19,7 +19,7 @@ fn history_insert_undo_with_lagging() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -32,7 +32,7 @@ fn history_insert_redo() {
Redo(0),
AssertDocJson(0, r#"[{"insert":"123\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -51,7 +51,7 @@ fn history_insert_redo_with_lagging() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"123\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -62,7 +62,7 @@ fn history_bold_undo() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -74,7 +74,7 @@ fn history_bold_undo_with_lagging() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"123\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -87,7 +87,7 @@ fn history_bold_redo() {
Redo(0),
AssertDocJson(0, r#" [{"insert":"123","attributes":{"bold":true}},{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -101,7 +101,7 @@ fn history_bold_redo_with_lagging() {
Redo(0),
AssertDocJson(0, r#"[{"insert":"123","attributes":{"bold":true}},{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -115,7 +115,7 @@ fn history_delete_undo() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -134,7 +134,7 @@ fn history_delete_undo_2() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -161,7 +161,7 @@ fn history_delete_undo_with_lagging() {
"#,
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -175,7 +175,7 @@ fn history_delete_redo() {
Redo(0),
AssertDocJson(0, r#"[{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -194,7 +194,7 @@ fn history_replace_undo() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -215,7 +215,7 @@ fn history_replace_undo_with_lagging() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"123","attributes":{"bold":true}},{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -234,7 +234,7 @@ fn history_replace_redo() {
"#,
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -253,7 +253,7 @@ fn history_header_added_undo() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -272,7 +272,7 @@ fn history_link_added_undo() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -291,7 +291,7 @@ fn history_link_auto_format_undo_with_lagging() {
AssertDocJson(0, r#"[{"insert":"https://appflowy.io\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -314,7 +314,7 @@ fn history_bullet_undo() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -342,7 +342,7 @@ fn history_bullet_undo_with_lagging() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -369,5 +369,5 @@ fn history_undo_attribute_on_merge_between_line() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}

View File

@ -1,2 +1,3 @@
mod document;
mod editor;
mod new_document;
mod old_document;

View File

@ -0,0 +1,2 @@
mod script;
mod test;

View File

@ -0,0 +1,84 @@
use flowy_document::editor::AppFlowyDocumentEditor;
use flowy_test::helper::ViewTest;
use flowy_test::FlowySDKTest;
use lib_ot::core::{Body, Changeset, NodeDataBuilder, NodeOperation, Path, Transaction};
use lib_ot::text_delta::TextOperations;
use std::sync::Arc;
pub enum EditScript {
InsertText { path: Path, delta: TextOperations },
UpdateText { path: Path, delta: TextOperations },
Delete { path: Path },
AssertContent { expected: &'static str },
AssertPrettyContent { expected: &'static str },
}
pub struct DocumentEditorTest {
pub sdk: FlowySDKTest,
pub editor: Arc<AppFlowyDocumentEditor>,
}
impl DocumentEditorTest {
pub async fn new() -> Self {
let sdk = FlowySDKTest::new(true);
let _ = sdk.init_user().await;
let test = ViewTest::new_document_view(&sdk).await;
let document_editor = sdk.document_manager.open_document_editor(&test.view.id).await.unwrap();
let editor = match document_editor.as_any().downcast_ref::<Arc<AppFlowyDocumentEditor>>() {
None => panic!(),
Some(editor) => editor.clone(),
};
Self { sdk, editor }
}
pub async fn run_scripts(&self, scripts: Vec<EditScript>) {
for script in scripts {
self.run_script(script).await;
}
}
async fn run_script(&self, script: EditScript) {
match script {
EditScript::InsertText { path, delta } => {
let node_data = NodeDataBuilder::new("text").insert_body(Body::Delta(delta)).build();
let operation = NodeOperation::Insert {
path,
nodes: vec![node_data],
};
self.editor
.apply_transaction(Transaction::from_operations(vec![operation]))
.await
.unwrap();
}
EditScript::UpdateText { path, delta } => {
let inverted = delta.invert_str("");
let changeset = Changeset::Delta { delta, inverted };
let operation = NodeOperation::Update { path, changeset };
self.editor
.apply_transaction(Transaction::from_operations(vec![operation]))
.await
.unwrap();
}
EditScript::Delete { path } => {
let operation = NodeOperation::Delete { path, nodes: vec![] };
self.editor
.apply_transaction(Transaction::from_operations(vec![operation]))
.await
.unwrap();
}
EditScript::AssertContent { expected } => {
//
let content = self.editor.get_content(false).await.unwrap();
assert_eq!(content, expected);
}
EditScript::AssertPrettyContent { expected } => {
//
let content = self.editor.get_content(true).await.unwrap();
assert_eq!(content, expected);
}
}
}
}

View File

@ -0,0 +1,156 @@
use crate::new_document::script::DocumentEditorTest;
use crate::new_document::script::EditScript::*;
use lib_ot::text_delta::TextOperationBuilder;
#[tokio::test]
async fn document_initialize_test() {
let scripts = vec![AssertContent {
expected: r#"{"document":{"type":"editor","children":[{"type":"text"}]}}"#,
}];
DocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
async fn document_insert_text_test() {
let delta = TextOperationBuilder::new().insert("Hello world").build();
let expected = r#"{
"document": {
"type": "editor",
"children": [
{
"type": "text",
"delta": [
{
"insert": "Hello world"
}
]
},
{
"type": "text"
}
]
}
}"#;
let scripts = vec![
InsertText {
path: vec![0, 0].into(),
delta,
},
AssertPrettyContent { expected },
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
async fn document_update_text_test() {
let test = DocumentEditorTest::new().await;
let hello_world = "Hello world".to_string();
let scripts = vec![
UpdateText {
path: vec![0, 0].into(),
delta: TextOperationBuilder::new().insert(&hello_world).build(),
},
AssertPrettyContent {
expected: r#"{
"document": {
"type": "editor",
"children": [
{
"type": "text",
"delta": [
{
"insert": "Hello world"
}
]
}
]
}
}"#,
},
];
test.run_scripts(scripts).await;
let scripts = vec![
UpdateText {
path: vec![0, 0].into(),
delta: TextOperationBuilder::new()
.retain(hello_world.len())
.insert(", AppFlowy")
.build(),
},
AssertPrettyContent {
expected: r#"{
"document": {
"type": "editor",
"children": [
{
"type": "text",
"delta": [
{
"insert": "Hello world, AppFlowy"
}
]
}
]
}
}"#,
},
];
test.run_scripts(scripts).await;
}
#[tokio::test]
async fn document_delete_text_test() {
let expected = r#"{
"document": {
"type": "editor",
"children": [
{
"type": "text",
"delta": [
{
"insert": "Hello"
}
]
}
]
}
}"#;
let hello_world = "Hello world".to_string();
let scripts = vec![
UpdateText {
path: vec![0, 0].into(),
delta: TextOperationBuilder::new().insert(&hello_world).build(),
},
UpdateText {
path: vec![0, 0].into(),
delta: TextOperationBuilder::new().retain(5).delete(6).build(),
},
AssertPrettyContent { expected },
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
async fn document_delete_node_test() {
let scripts = vec![
UpdateText {
path: vec![0, 0].into(),
delta: TextOperationBuilder::new().insert("Hello world").build(),
},
AssertContent {
expected: r#"{"document":{"type":"editor","children":[{"type":"text","delta":[{"insert":"Hello world"}]}]}}"#,
},
Delete {
path: vec![0, 0].into(),
},
AssertContent {
expected: r#"{"document":{"type":"editor"}}"#,
},
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
}
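The assertions above compare exact JSON strings. A possible test-side helper (not part of the commit) compares the document content structurally instead, so whitespace and key order do not matter; it assumes serde_json is available to the test crate and that AppFlowyDocumentEditor::get_content is in scope as used by the script runner:
use serde_json::Value;
// Hypothetical helper: parse both sides and compare serde_json values instead of raw strings.
async fn assert_document_json(editor: &AppFlowyDocumentEditor, expected: &str) {
    let actual = editor.get_content(false).await.unwrap();
    let actual: Value = serde_json::from_str(&actual).unwrap();
    let expected: Value = serde_json::from_str(expected).unwrap();
    assert_eq!(actual, expected);
}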

View File

@ -0,0 +1,2 @@
mod old_document_test;
mod script;

View File

@ -1,4 +1,4 @@
use crate::document::script::{EditorScript::*, *};
use crate::old_document::script::{EditorScript::*, *};
use flowy_revision::disk::RevisionState;
use lib_ot::core::{count_utf16_code_units, Interval};
@ -14,7 +14,7 @@ async fn text_block_sync_current_rev_id_check() {
AssertNextSyncRevId(None),
AssertJson(r#"[{"insert":"123\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -28,7 +28,7 @@ async fn text_block_sync_state_check() {
AssertRevisionState(3, RevisionState::Ack),
AssertJson(r#"[{"insert":"123\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -40,7 +40,7 @@ async fn text_block_sync_insert_test() {
AssertJson(r#"[{"insert":"123\n"}]"#),
AssertNextSyncRevId(None),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -52,7 +52,7 @@ async fn text_block_sync_insert_in_chinese() {
InsertText("好", offset),
AssertJson(r#"[{"insert":"你好\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -64,7 +64,7 @@ async fn text_block_sync_insert_with_emoji() {
InsertText("☺️", offset),
AssertJson(r#"[{"insert":"😁☺️\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -76,7 +76,7 @@ async fn text_block_sync_delete_in_english() {
Delete(Interval::new(0, 2)),
AssertJson(r#"[{"insert":"3\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -89,7 +89,7 @@ async fn text_block_sync_delete_in_chinese() {
Delete(Interval::new(0, offset)),
AssertJson(r#"[{"insert":"好\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -101,5 +101,5 @@ async fn text_block_sync_replace_test() {
Replace(Interval::new(0, 3), "abc"),
AssertJson(r#"[{"insert":"abc\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}

View File

@ -1,4 +1,4 @@
use flowy_document::editor::DocumentEditor;
use flowy_document::old_editor::editor::OldDocumentEditor;
use flowy_document::TEXT_BLOCK_SYNC_INTERVAL_IN_MILLIS;
use flowy_revision::disk::RevisionState;
use flowy_test::{helper::ViewTest, FlowySDKTest};
@ -17,21 +17,21 @@ pub enum EditorScript {
AssertJson(&'static str),
}
pub struct DocumentEditorTest {
pub struct OldDocumentEditorTest {
pub sdk: FlowySDKTest,
pub editor: Arc<DocumentEditor>,
pub editor: Arc<OldDocumentEditor>,
}
impl DocumentEditorTest {
impl OldDocumentEditorTest {
pub async fn new() -> Self {
let sdk = FlowySDKTest::default();
let _ = sdk.init_user().await;
let test = ViewTest::new_text_block_view(&sdk).await;
let editor = sdk
.text_block_manager
.open_document_editor(&test.view.id)
.await
.unwrap();
let test = ViewTest::new_document_view(&sdk).await;
let document_editor = sdk.document_manager.open_document_editor(&test.view.id).await.unwrap();
let editor = match document_editor.as_any().downcast_ref::<Arc<OldDocumentEditor>>() {
None => panic!("failed to downcast to OldDocumentEditor"),
Some(editor) => editor.clone(),
};
Self { sdk, editor }
}

View File

@ -67,6 +67,7 @@ impl FlowyError {
static_flowy_error!(text_too_long, ErrorCode::TextTooLong);
static_flowy_error!(invalid_data, ErrorCode::InvalidData);
static_flowy_error!(out_of_bounds, ErrorCode::OutOfBounds);
static_flowy_error!(serde, ErrorCode::Serde);
}
impl std::convert::From<ErrorCode> for FlowyError {

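The new serde constructor follows the same static_flowy_error! pattern as its neighbors. A hedged usage sketch (not from the commit), assuming the crate's serde/serde_json dependencies and the usual FlowyError::context combinator:
use flowy_error::{FlowyError, FlowyResult};
// Map a serde_json failure onto the new ErrorCode::Serde variant.
fn to_json_string<T: serde::Serialize>(value: &T) -> FlowyResult<String> {
    serde_json::to_string(value).map_err(|err| FlowyError::serde().context(err))
}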
View File

@ -1,6 +1,6 @@
use crate::entities::view::ViewDataTypePB;
use crate::entities::ViewLayoutTypePB;
use crate::services::folder_editor::FolderRevisionCompactor;
use crate::services::folder_editor::FolderRevisionCompress;
use crate::{
dart_notification::{send_dart_notification, FolderNotification},
entities::workspace::RepeatedWorkspacePB,
@ -16,7 +16,8 @@ use flowy_error::FlowyError;
use flowy_folder_data_model::user_default;
use flowy_revision::disk::SQLiteDocumentRevisionPersistence;
use flowy_revision::{RevisionManager, RevisionPersistence, RevisionWebSocket, SQLiteRevisionSnapshotPersistence};
use flowy_sync::client_document::default::{initial_document_str, initial_read_me};
use flowy_sync::client_document::default::initial_read_me;
use flowy_sync::{client_folder::FolderPad, entities::ws_data::ServerRevisionWSData};
use lazy_static::lazy_static;
use lib_infra::future::FutureResult;
@ -166,7 +167,7 @@ impl FolderManager {
let object_id = folder_id.as_ref();
let disk_cache = SQLiteDocumentRevisionPersistence::new(user_id, pool.clone());
let rev_persistence = RevisionPersistence::new(user_id, object_id, disk_cache);
let rev_compactor = FolderRevisionCompactor();
let rev_compactor = FolderRevisionCompress();
// let history_persistence = SQLiteRevisionHistoryPersistence::new(object_id, pool.clone());
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(object_id, pool);
let rev_manager = RevisionManager::new(
@ -215,16 +216,14 @@ impl DefaultFolderBuilder {
set_current_workspace(&workspace_rev.id);
for app in workspace_rev.apps.iter() {
for (index, view) in app.belongings.iter().enumerate() {
let view_data = if index == 0 {
initial_read_me().json_str()
} else {
initial_document_str()
};
let _ = view_controller.set_latest_view(&view.id);
let layout_type = ViewLayoutTypePB::from(view.layout.clone());
let _ = view_controller
.create_view(&view.id, ViewDataTypePB::Text, layout_type, Bytes::from(view_data))
.await?;
if index == 0 {
let view_data = initial_read_me().json_str();
let _ = view_controller.set_latest_view(&view.id);
let layout_type = ViewLayoutTypePB::from(view.layout.clone());
let _ = view_controller
.create_view(&view.id, ViewDataTypePB::Text, layout_type, Bytes::from(view_data))
.await?;
}
}
}
let folder = FolderPad::new(vec![workspace_rev.clone()], vec![])?;

View File

@ -112,16 +112,16 @@ impl RevisionObjectDeserializer for FolderRevisionSerde {
}
impl RevisionObjectSerializer for FolderRevisionSerde {
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
}
pub struct FolderRevisionCompactor();
impl RevisionCompress for FolderRevisionCompactor {
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
FolderRevisionSerde::serialize_revisions(revisions)
pub struct FolderRevisionCompress();
impl RevisionCompress for FolderRevisionCompress {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
FolderRevisionSerde::combine_revisions(revisions)
}
}

View File

@ -1,6 +1,6 @@
use crate::entities::GridLayout;
use crate::services::block_editor::GridBlockRevisionCompactor;
use crate::services::grid_editor::{GridRevisionCompactor, GridRevisionEditor};
use crate::services::block_editor::GridBlockRevisionCompress;
use crate::services::grid_editor::{GridRevisionCompress, GridRevisionEditor};
use crate::services::grid_view_manager::make_grid_view_rev_manager;
use crate::services::persistence::block_index::BlockIndexCache;
use crate::services::persistence::kv::GridKVPersistence;
@ -158,7 +158,7 @@ impl GridManager {
let disk_cache = SQLiteGridRevisionPersistence::new(&user_id, pool.clone());
let rev_persistence = RevisionPersistence::new(&user_id, grid_id, disk_cache);
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(grid_id, pool);
let rev_compactor = GridRevisionCompactor();
let rev_compactor = GridRevisionCompress();
let rev_manager = RevisionManager::new(&user_id, grid_id, rev_persistence, rev_compactor, snapshot_persistence);
Ok(rev_manager)
}
@ -167,7 +167,7 @@ impl GridManager {
let user_id = self.grid_user.user_id()?;
let disk_cache = SQLiteGridBlockRevisionPersistence::new(&user_id, pool.clone());
let rev_persistence = RevisionPersistence::new(&user_id, block_id, disk_cache);
let rev_compactor = GridBlockRevisionCompactor();
let rev_compactor = GridBlockRevisionCompress();
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(block_id, pool);
let rev_manager =
RevisionManager::new(&user_id, block_id, rev_persistence, rev_compactor, snapshot_persistence);

View File

@ -204,15 +204,15 @@ impl RevisionObjectDeserializer for GridBlockRevisionSerde {
}
impl RevisionObjectSerializer for GridBlockRevisionSerde {
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
}
pub struct GridBlockRevisionCompactor();
impl RevisionCompress for GridBlockRevisionCompactor {
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridBlockRevisionSerde::serialize_revisions(revisions)
pub struct GridBlockRevisionCompress();
impl RevisionCompress for GridBlockRevisionCompress {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridBlockRevisionSerde::combine_revisions(revisions)
}
}

View File

@ -1,7 +1,7 @@
use crate::dart_notification::{send_dart_notification, GridNotification};
use crate::entities::{CellChangesetPB, GridBlockChangesetPB, InsertedRowPB, RowPB};
use crate::manager::GridUser;
use crate::services::block_editor::{GridBlockRevisionCompactor, GridBlockRevisionEditor};
use crate::services::block_editor::{GridBlockRevisionCompress, GridBlockRevisionEditor};
use crate::services::persistence::block_index::BlockIndexCache;
use crate::services::row::{block_from_row_orders, make_row_from_row_rev, GridBlockSnapshot};
use dashmap::DashMap;
@ -274,7 +274,7 @@ async fn make_block_editor(user: &Arc<dyn GridUser>, block_id: &str) -> FlowyRes
let disk_cache = SQLiteGridBlockRevisionPersistence::new(&user_id, pool.clone());
let rev_persistence = RevisionPersistence::new(&user_id, block_id, disk_cache);
let rev_compactor = GridBlockRevisionCompactor();
let rev_compactor = GridBlockRevisionCompress();
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(block_id, pool);
let rev_manager = RevisionManager::new(&user_id, block_id, rev_persistence, rev_compactor, snapshot_persistence);
GridBlockRevisionEditor::new(&user_id, &token, block_id, rev_manager).await

View File

@ -842,7 +842,7 @@ impl RevisionObjectDeserializer for GridRevisionSerde {
}
}
impl RevisionObjectSerializer for GridRevisionSerde {
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
@ -859,11 +859,11 @@ impl RevisionCloudService for GridRevisionCloudService {
}
}
pub struct GridRevisionCompactor();
pub struct GridRevisionCompress();
impl RevisionCompress for GridRevisionCompactor {
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridRevisionSerde::serialize_revisions(revisions)
impl RevisionCompress for GridRevisionCompress {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridRevisionSerde::combine_revisions(revisions)
}
}

View File

@ -488,16 +488,16 @@ impl RevisionObjectDeserializer for GridViewRevisionSerde {
}
impl RevisionObjectSerializer for GridViewRevisionSerde {
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
}
pub struct GridViewRevisionCompactor();
impl RevisionCompress for GridViewRevisionCompactor {
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridViewRevisionSerde::serialize_revisions(revisions)
pub struct GridViewRevisionCompress();
impl RevisionCompress for GridViewRevisionCompress {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridViewRevisionSerde::combine_revisions(revisions)
}
}

View File

@ -4,7 +4,7 @@ use crate::entities::{
};
use crate::manager::GridUser;
use crate::services::grid_editor_task::GridServiceTaskScheduler;
use crate::services::grid_view_editor::{GridViewRevisionCompactor, GridViewRevisionEditor};
use crate::services::grid_view_editor::{GridViewRevisionCompress, GridViewRevisionEditor};
use dashmap::DashMap;
use flowy_error::FlowyResult;
@ -250,7 +250,7 @@ pub async fn make_grid_view_rev_manager(user: &Arc<dyn GridUser>, view_id: &str)
let disk_cache = SQLiteGridViewRevisionPersistence::new(&user_id, pool.clone());
let rev_persistence = RevisionPersistence::new(&user_id, view_id, disk_cache);
let rev_compactor = GridViewRevisionCompactor();
let rev_compactor = GridViewRevisionCompress();
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(view_id, pool);
Ok(RevisionManager::new(

View File

@ -4,7 +4,6 @@ use bytes::Bytes;
use flowy_error::{internal_error, FlowyError};
use flowy_folder::event_map::FolderCouldServiceV1;
use flowy_sync::{
client_document::default::initial_document_str,
entities::{
document::{CreateDocumentParams, DocumentIdPB, DocumentPayloadPB, ResetDocumentParams},
ws_data::{ClientRevisionWSData, ClientRevisionWSDataType},
@ -422,15 +421,9 @@ impl DocumentCloudService for LocalServer {
fn fetch_document(
&self,
_token: &str,
params: DocumentIdPB,
_params: DocumentIdPB,
) -> FutureResult<Option<DocumentPayloadPB>, FlowyError> {
let doc = DocumentPayloadPB {
doc_id: params.value,
content: initial_document_str(),
rev_id: 0,
base_rev_id: 0,
};
FutureResult::new(async { Ok(Some(doc)) })
FutureResult::new(async { Ok(None) })
}
fn update_document_content(&self, _token: &str, _params: ResetDocumentParams) -> FutureResult<(), FlowyError> {

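With the local server now returning Ok(None), the document layer is expected to fall back to locally generated initial content rather than server-provided text. A caller-side sketch of that fallback (import paths and return types are assumptions, not code from the commit):
use std::sync::Arc;
use flowy_document::{DocumentCloudService, DocumentManager};
use flowy_error::FlowyError;
use flowy_sync::entities::document::DocumentIdPB;
// If nothing is stored remotely, use the manager's initial content for the
// configured editor (old delta JSON or the new transaction-based document).
async fn fetch_or_default(
    cloud: Arc<dyn DocumentCloudService>,
    manager: Arc<DocumentManager>,
    token: &str,
    params: DocumentIdPB,
) -> Result<String, FlowyError> {
    match cloud.fetch_document(token, params).await? {
        Some(payload) => Ok(payload.content),
        None => Ok(manager.initial_document_content()),
    }
}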
View File

@ -33,11 +33,11 @@ pub trait RevisionObjectDeserializer: Send + Sync {
}
pub trait RevisionObjectSerializer: Send + Sync {
/// Serialize the list of revisions to `Bytes`
/// Combine a list of revisions into a single one, serialized as `Bytes`
///
/// * `revisions`: the revisions to be combined and serialized into `Bytes`
///
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes>;
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes>;
}
/// `RevisionCompress` is used to compress multiple revisions into one revision
@ -62,11 +62,11 @@ pub trait RevisionCompress: Send + Sync {
let (base_rev_id, rev_id) = first_revision.pair_rev_id();
let md5 = last_revision.md5.clone();
let bytes = self.serialize_revisions(revisions)?;
let bytes = self.combine_revisions(revisions)?;
Ok(Revision::new(object_id, base_rev_id, rev_id, bytes, user_id, md5))
}
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes>;
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes>;
}
pub struct RevisionManager {

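After the rename, an object's serializer and its RevisionCompress wrapper both expose combine_revisions, as the Folder and Grid implementations above illustrate. A generic sketch of the pattern for some new object type (import paths are assumptions):
use bytes::Bytes;
use flowy_error::FlowyResult;
use flowy_revision::{RevisionCompress, RevisionObjectSerializer};
use flowy_sync::entities::revision::Revision;
use flowy_sync::util::make_operations_from_revisions;
use lib_ot::core::EmptyAttributes;
struct MyObjectRevisionSerde();
impl RevisionObjectSerializer for MyObjectRevisionSerde {
    // Fold every revision's operations into one list and serialize it as JSON bytes.
    fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
        let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
        Ok(operations.json_bytes())
    }
}
struct MyObjectRevisionCompress();
impl RevisionCompress for MyObjectRevisionCompress {
    // The compressor simply delegates to the object-level serializer.
    fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
        MyObjectRevisionSerde::combine_revisions(revisions)
    }
}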
View File

@ -2,7 +2,7 @@ use bytes::Bytes;
use flowy_database::ConnectionPool;
use flowy_document::{
errors::{internal_error, FlowyError},
DocumentCloudService, DocumentManager, DocumentUser,
DocumentCloudService, DocumentConfig, DocumentManager, DocumentUser,
};
use flowy_net::ClientServerConfiguration;
use flowy_net::{
@ -23,6 +23,7 @@ impl DocumentDepsResolver {
ws_conn: Arc<FlowyWebSocketConnect>,
user_session: Arc<UserSession>,
server_config: &ClientServerConfiguration,
document_config: &DocumentConfig,
) -> Arc<DocumentManager> {
let user = Arc::new(BlockUserImpl(user_session));
let rev_web_socket = Arc::new(DocumentRevisionWebSocket(ws_conn.clone()));
@ -31,7 +32,12 @@ impl DocumentDepsResolver {
Some(local_server) => local_server,
};
let manager = Arc::new(DocumentManager::new(cloud_service, user, rev_web_socket));
let manager = Arc::new(DocumentManager::new(
cloud_service,
user,
rev_web_socket,
document_config.clone(),
));
let receiver = Arc::new(DocumentWSMessageReceiverImpl(manager.clone()));
ws_conn.add_ws_message_receiver(receiver).unwrap();

View File

@ -17,7 +17,6 @@ use flowy_net::{
http_server::folder::FolderHttpCloudService, local_server::LocalServer, ws::connection::FlowyWebSocketConnect,
};
use flowy_revision::{RevisionWebSocket, WSStateReceiver};
use flowy_sync::client_document::default::initial_document_str;
use flowy_sync::entities::revision::{RepeatedRevision, Revision};
use flowy_sync::entities::ws_data::ClientRevisionWSData;
use flowy_user::services::UserSession;
@ -175,7 +174,7 @@ impl ViewDataProcessor for DocumentViewDataProcessor {
let manager = self.0.clone();
FutureResult::new(async move {
let editor = manager.open_document_editor(view_id).await?;
let delta_bytes = Bytes::from(editor.get_operation_str().await?);
let delta_bytes = Bytes::from(editor.get_operations_str().await?);
Ok(delta_bytes)
})
}
@ -190,8 +189,8 @@ impl ViewDataProcessor for DocumentViewDataProcessor {
let user_id = user_id.to_string();
let view_id = view_id.to_string();
let manager = self.0.clone();
let view_data = self.0.initial_document_content();
FutureResult::new(async move {
let view_data = initial_document_str();
let delta_data = Bytes::from(view_data);
let repeated_revision: RepeatedRevision =
Revision::initial_revision(&user_id, &view_id, delta_data.clone()).into();

View File

@ -3,7 +3,7 @@ pub mod module;
pub use flowy_net::get_client_server_configuration;
use crate::deps_resolve::*;
use flowy_document::DocumentManager;
use flowy_document::{DocumentConfig, DocumentManager};
use flowy_folder::{errors::FlowyError, manager::FolderManager};
use flowy_grid::manager::GridManager;
use flowy_net::ClientServerConfiguration;
@ -34,24 +34,28 @@ pub struct FlowySDKConfig {
root: String,
log_filter: String,
server_config: ClientServerConfiguration,
document_config: DocumentConfig,
}
impl fmt::Debug for FlowySDKConfig {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("FlowySDKConfig")
.field("root", &self.root)
.field("server_config", &self.server_config)
.field("server-config", &self.server_config)
.field("document-config", &self.document_config)
.finish()
}
}
impl FlowySDKConfig {
pub fn new(root: &str, server_config: ClientServerConfiguration, name: &str) -> Self {
pub fn new(root: &str, name: &str, server_config: ClientServerConfiguration, use_new_editor: bool) -> Self {
let document_config = DocumentConfig { use_new_editor };
FlowySDKConfig {
name: name.to_owned(),
root: root.to_owned(),
log_filter: crate_log_filter("info".to_owned()),
server_config,
document_config,
}
}
@ -89,7 +93,7 @@ pub struct FlowySDK {
#[allow(dead_code)]
config: FlowySDKConfig,
pub user_session: Arc<UserSession>,
pub text_block_manager: Arc<DocumentManager>,
pub document_manager: Arc<DocumentManager>,
pub folder_manager: Arc<FolderManager>,
pub grid_manager: Arc<GridManager>,
pub dispatcher: Arc<EventDispatcher>,
@ -106,11 +110,12 @@ impl FlowySDK {
let (local_server, ws_conn) = mk_local_server(&config.server_config);
let (user_session, text_block_manager, folder_manager, local_server, grid_manager) = runtime.block_on(async {
let user_session = mk_user_session(&config, &local_server, &config.server_config);
let text_block_manager = DocumentDepsResolver::resolve(
let document_manager = DocumentDepsResolver::resolve(
local_server.clone(),
ws_conn.clone(),
user_session.clone(),
&config.server_config,
&config.document_config,
);
let grid_manager = GridDepsResolver::resolve(ws_conn.clone(), user_session.clone()).await;
@ -120,7 +125,7 @@ impl FlowySDK {
user_session.clone(),
&config.server_config,
&ws_conn,
&text_block_manager,
&document_manager,
&grid_manager,
)
.await;
@ -131,7 +136,7 @@ impl FlowySDK {
ws_conn.init().await;
(
user_session,
text_block_manager,
document_manager,
folder_manager,
local_server,
grid_manager,
@ -153,7 +158,7 @@ impl FlowySDK {
Self {
config,
user_session,
text_block_manager,
document_manager: text_block_manager,
folder_manager,
grid_manager,
dispatcher,

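FlowySDKConfig::new now takes the SDK name before the server configuration plus a use_new_editor flag that becomes the DocumentConfig. A construction sketch mirroring the call sites in this commit (the "my-app" name is illustrative only):
use flowy_net::get_client_server_configuration;
use flowy_sdk::{FlowySDK, FlowySDKConfig};
// Opt into the new document editor when building the SDK.
fn make_sdk(root: &str) -> FlowySDK {
    let server_config = get_client_server_configuration().unwrap();
    let config = FlowySDKConfig::new(root, "my-app", server_config, true).log_filter("info");
    FlowySDK::new(config)
}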
View File

@ -10,6 +10,7 @@ flowy-sdk = { path = "../flowy-sdk", default-features = false }
flowy-user = { path = "../flowy-user"}
flowy-net = { path = "../flowy-net"}
flowy-folder = { path = "../flowy-folder", default-features = false}
flowy-document= { path = "../flowy-document", default-features = false}
lib-dispatch = { path = "../lib-dispatch" }
flowy-sync = { path = "../../../shared-lib/flowy-sync" }

View File

@ -46,7 +46,7 @@ impl ViewTest {
Self::new(sdk, ViewDataTypePB::Database, ViewLayoutTypePB::Board, data).await
}
pub async fn new_text_block_view(sdk: &FlowySDKTest) -> Self {
pub async fn new_document_view(sdk: &FlowySDKTest) -> Self {
Self::new(sdk, ViewDataTypePB::Text, ViewLayoutTypePB::Document, vec![]).await
}
}

View File

@ -2,7 +2,8 @@ pub mod event_builder;
pub mod helper;
use crate::helper::*;
use flowy_net::{get_client_server_configuration, ClientServerConfiguration};
use flowy_net::get_client_server_configuration;
use flowy_sdk::{FlowySDK, FlowySDKConfig};
use flowy_user::entities::UserProfilePB;
use nanoid::nanoid;
@ -27,16 +28,14 @@ impl std::ops::Deref for FlowySDKTest {
impl std::default::Default for FlowySDKTest {
fn default() -> Self {
let server_config = get_client_server_configuration().unwrap();
let sdk = Self::new(server_config);
std::mem::forget(sdk.dispatcher());
sdk
Self::new(false)
}
}
impl FlowySDKTest {
pub fn new(server_config: ClientServerConfiguration) -> Self {
let config = FlowySDKConfig::new(&root_dir(), server_config, &nanoid!(6)).log_filter("info");
pub fn new(use_new_editor: bool) -> Self {
let server_config = get_client_server_configuration().unwrap();
let config = FlowySDKConfig::new(&root_dir(), &nanoid!(6), server_config, use_new_editor).log_filter("info");
let sdk = std::thread::spawn(|| FlowySDK::new(config)).join().unwrap();
std::mem::forget(sdk.dispatcher());
Self { inner: sdk }