Feat: add appflowy editor in backend (#1320)

* chore: remove update attributes

* chore: format code

* chore: extension for transaction

* refactor: add document editor trait

* chore: add appflowy_document editor

* chore: add document serde

* chore: add new document editor

* chore: add tests

* chore: add more tests

* chore: add test

Co-authored-by: nathan <nathan@appflowy.io>
This commit is contained in:
Nathan.fooo 2022-10-20 11:35:11 +08:00 committed by GitHub
parent 833a6cd95f
commit f1a5726fcb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
81 changed files with 2367 additions and 902 deletions

View File

@ -115,7 +115,7 @@ class DocumentBloc extends Bloc<DocumentEvent, DocumentState> {
void _composeDelta(Delta composedDelta, Delta documentDelta) async {
final json = jsonEncode(composedDelta.toJson());
Log.debug("doc_id: $view.id - Send json: $json");
final result = await service.applyEdit(docId: view.id, data: json);
final result = await service.applyEdit(docId: view.id, operations: json);
result.fold(
(_) {},

View File

@ -18,13 +18,11 @@ class DocumentService {
Future<Either<Unit, FlowyError>> applyEdit({
required String docId,
required String data,
String operations = "",
required String operations,
}) {
final payload = EditPayloadPB.create()
..docId = docId
..operations = operations
..operationsStr = data;
..operations = operations;
return DocumentEventApplyEdit(payload).send();
}

View File

@ -1154,6 +1154,7 @@ dependencies = [
"claim 0.4.0",
"claim 0.5.0",
"fake",
"flowy-document",
"flowy-folder",
"flowy-net",
"flowy-sdk",

View File

@ -23,7 +23,7 @@ pub extern "C" fn init_sdk(path: *mut c_char) -> i64 {
let path: &str = c_str.to_str().unwrap();
let server_config = get_client_server_configuration().unwrap();
let config = FlowySDKConfig::new(path, server_config, "appflowy").log_filter("debug");
let config = FlowySDKConfig::new(path, "appflowy", server_config, false).log_filter("debug");
FLOWY_SDK.get_or_init(|| FlowySDK::new(config));
0

View File

@ -0,0 +1,99 @@
use bytes::Bytes;
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{RevisionObjectDeserializer, RevisionObjectSerializer};
use flowy_sync::entities::revision::Revision;
use lib_ot::core::{Body, Extension, Interval, NodeDataBuilder, NodeOperation, NodeTree, NodeTreeContext, Transaction};
use lib_ot::text_delta::TextOperationBuilder;
/// In-memory representation of a document for the new editor: a thin
/// wrapper around a `NodeTree` holding the document's node hierarchy.
#[derive(Debug)]
pub struct Document {
    tree: NodeTree,
}

impl Document {
    /// Wraps an existing `NodeTree` in a `Document`.
    pub fn new(tree: NodeTree) -> Self {
        Self { tree }
    }

    /// Builds a document by replaying the operations of `transaction` onto a
    /// fresh tree created with `make_tree_context`.
    pub fn from_transaction(transaction: Transaction) -> FlowyResult<Self> {
        let tree = NodeTree::from_operations(transaction.operations, make_tree_context())?;
        Ok(Self { tree })
    }

    /// Serializes the document to JSON, pretty-printed when `pretty` is true.
    /// Serde failures are wrapped in `FlowyError::serde()` with context.
    pub fn get_content(&self, pretty: bool) -> FlowyResult<String> {
        if pretty {
            serde_json::to_string_pretty(self).map_err(|err| FlowyError::serde().context(err))
        } else {
            serde_json::to_string(self).map_err(|err| FlowyError::serde().context(err))
        }
    }

    /// Returns a borrow of the underlying node tree.
    pub fn get_tree(&self) -> &NodeTree {
        &self.tree
    }
}
/// Creates the `NodeTreeContext` used whenever a document tree is built.
/// The context currently carries no configuration.
pub(crate) fn make_tree_context() -> NodeTreeContext {
    NodeTreeContext {}
}
/// Returns the JSON-encoded `Transaction` that seeds a brand-new document:
/// a single "editor" root node containing one empty "text" child.
pub fn initial_document_content() -> String {
    // Empty delta: the initial text node starts with no content.
    let delta = TextOperationBuilder::new().insert("").build();
    let node_data = NodeDataBuilder::new("text").insert_body(Body::Delta(delta)).build();
    let editor_node = NodeDataBuilder::new("editor").add_node_data(node_data).build();
    // Insert the editor node at the root of the tree.
    let node_operation = NodeOperation::Insert {
        path: vec![0].into(),
        nodes: vec![editor_node],
    };
    // Start with a collapsed (default) selection before and after the edit.
    let extension = Extension::TextSelection {
        before_selection: Interval::default(),
        after_selection: Interval::default(),
    };
    let transaction = Transaction {
        operations: vec![node_operation].into(),
        extension,
    };
    // Serializing this fixed, well-formed transaction cannot fail.
    transaction.to_json().unwrap()
}
// Deref/DerefMut to the inner tree so a `Document` can be used wherever a
// `NodeTree` is expected (e.g. `apply_transaction` in the edit queue).
impl std::ops::Deref for Document {
    type Target = NodeTree;

    fn deref(&self) -> &Self::Target {
        &self.tree
    }
}

impl std::ops::DerefMut for Document {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.tree
    }
}
/// Revision (de)serializer for the new document editor: all revisions are
/// composed into a single `Transaction`, which is either applied to a fresh
/// tree (deserialize) or re-encoded as bytes (serialize).
pub struct DocumentRevisionSerde();

impl RevisionObjectDeserializer for DocumentRevisionSerde {
    type Output = Document;

    /// Rebuilds a `Document` by composing all `revisions` into one
    /// transaction and applying it to an empty tree.
    fn deserialize_revisions(_object_id: &str, revisions: Vec<Revision>) -> FlowyResult<Self::Output> {
        let mut tree = NodeTree::new(make_tree_context());
        let transaction = make_transaction_from_revisions(revisions)?;
        // `?` already propagates the error; the Ok value is `()`, so no
        // `let _ =` binding is needed.
        tree.apply_transaction(transaction)?;
        Ok(Document::new(tree))
    }
}

impl RevisionObjectSerializer for DocumentRevisionSerde {
    /// Collapses `revisions` into one transaction and returns its byte encoding.
    fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
        let transaction = make_transaction_from_revisions(revisions)?;
        Ok(Bytes::from(transaction.to_bytes()?))
    }
}
/// Composes the payload of each revision, in order, into a single
/// `Transaction`.
///
/// Returns an error if any revision's bytes fail to decode or if composing
/// the transactions fails.
fn make_transaction_from_revisions(revisions: Vec<Revision>) -> FlowyResult<Transaction> {
    let mut transaction = Transaction::new();
    for revision in revisions {
        // `?` already propagates failures; the Ok value is `()`, so the
        // original `let _ =` binding was redundant.
        transaction.compose(Transaction::from_bytes(&revision.bytes)?)?;
    }
    Ok(transaction)
}

View File

@ -0,0 +1,247 @@
use crate::editor::document::Document;
use lib_ot::core::{AttributeHashMap, Body, NodeData, NodeId, NodeTree};
use lib_ot::text_delta::TextOperations;
use serde::de::{self, MapAccess, Visitor};
use serde::ser::{SerializeMap, SerializeSeq};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
impl Serialize for Document {
    /// Serializes the document as a single-entry map:
    /// `{ "document": <serialized root content> }`, delegating the value to
    /// `DocumentContentSerializer`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut map = serializer.serialize_map(Some(1))?;
        let _ = map.serialize_key("document")?;
        let _ = map.serialize_value(&DocumentContentSerializer(self))?;
        map.end()
    }
}
const FIELDS: &[&str] = &["Document"];
impl<'de> Deserialize<'de> for Document {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct DocumentVisitor();
impl<'de> Visitor<'de> for DocumentVisitor {
type Value = Document;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Expect document tree")
}
fn visit_map<M>(self, mut map: M) -> Result<Document, M::Error>
where
M: MapAccess<'de>,
{
let mut node_tree = None;
while let Some(key) = map.next_key()? {
match key {
"document" => {
if node_tree.is_some() {
return Err(de::Error::duplicate_field("document"));
}
node_tree = Some(map.next_value::<NodeTree>()?)
}
s => {
return Err(de::Error::unknown_field(s, FIELDS));
}
}
}
match node_tree {
Some(tree) => Ok(Document::new(tree)),
None => Err(de::Error::missing_field("document")),
}
}
}
deserializer.deserialize_any(DocumentVisitor())
}
}
/// Serialization adapter that emits the children of the document's root node
/// rather than the root itself. See its `Serialize` impl below.
#[derive(Debug)]
struct DocumentContentSerializer<'a>(pub &'a Document);
/// Serializable view of a `NodeData`: the node type, its attributes, an
/// optional text delta, and child nodes. Empty fields are omitted on
/// serialization and defaulted on deserialization.
#[derive(Default, Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
pub struct DocumentNodeData {
    #[serde(rename = "type")]
    pub node_type: String,
    #[serde(skip_serializing_if = "AttributeHashMap::is_empty")]
    #[serde(default)]
    pub attributes: AttributeHashMap,
    // `#[serde(default)]` added for symmetry with `attributes`/`children`:
    // attribute-only nodes (e.g. images) carry no "delta" key, which would
    // otherwise fail to deserialize into this type.
    #[serde(skip_serializing_if = "TextOperations::is_empty")]
    #[serde(default)]
    pub delta: TextOperations,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    #[serde(default)]
    pub children: Vec<DocumentNodeData>,
}
impl std::convert::From<NodeData> for DocumentNodeData {
    /// Converts a `NodeData` into its serializable counterpart, extracting
    /// the delta body (if any) and converting children recursively.
    fn from(node_data: NodeData) -> Self {
        // Only delta bodies carry text operations; any other body maps to an
        // empty delta.
        let delta = match node_data.body {
            Body::Delta(operations) => operations,
            _ => TextOperations::default(),
        };
        DocumentNodeData {
            node_type: node_data.node_type,
            attributes: node_data.attributes,
            delta,
            children: node_data.children.into_iter().map(Self::from).collect(),
        }
    }
}
impl<'a> Serialize for DocumentContentSerializer<'a> {
    /// Serializes the root's children instead of the root node itself:
    /// a single child is emitted directly, multiple children as a sequence,
    /// and a missing/unresolvable child as the empty string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let tree = self.0.get_tree();
        let root_node_id = tree.root_node_id();
        // transform the NodeData to DocumentNodeData
        let get_document_node_data = |node_id: NodeId| tree.get_node_data(node_id).map(DocumentNodeData::from);
        let mut children = tree.get_children_ids(root_node_id);
        if children.len() == 1 {
            let node_id = children.pop().unwrap();
            match get_document_node_data(node_id) {
                None => serializer.serialize_str(""),
                Some(node_data) => node_data.serialize(serializer),
            }
        } else {
            let mut seq = serializer.serialize_seq(Some(children.len()))?;
            for child in children {
                // Children whose data cannot be resolved are silently skipped.
                if let Some(node_data) = get_document_node_data(child) {
                    let _ = seq.serialize_element(&node_data)?;
                }
            }
            seq.end()
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::editor::document::Document;

    /// Round-trip sanity check: the example JSON deserializes into a
    /// `Document` and serializes back without error.
    #[test]
    fn document_serde_test() {
        let document: Document = serde_json::from_str(EXAMPLE_DOCUMENT).unwrap();
        let _ = serde_json::to_string_pretty(&document).unwrap();
    }

    // Representative document exercising headings, checkboxes, quotes,
    // bulleted lists, inline attributes, and an attribute-only (image) node.
    const EXAMPLE_DOCUMENT: &str = r#"{
"document": {
"type": "editor",
"children": [
{
"type": "image",
"attributes": {
"image_src": "https://s1.ax1x.com/2022/08/26/v2sSbR.jpg",
"align": "center"
}
},
{
"type": "text",
"attributes": { "subtype": "heading", "heading": "h1" },
"delta": [
{ "insert": "👋 " },
{ "insert": "Welcome to ", "attributes": { "bold": true } },
{
"insert": "AppFlowy Editor",
"attributes": {
"href": "appflowy.io",
"italic": true,
"bold": true
}
}
]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"delta": [
{ "insert": "AppFlowy Editor is a " },
{ "insert": "highly customizable", "attributes": { "bold": true } },
{ "insert": " " },
{ "insert": "rich-text editor", "attributes": { "italic": true } },
{ "insert": " for " },
{ "insert": "Flutter", "attributes": { "underline": true } }
]
},
{
"type": "text",
"attributes": { "checkbox": true, "subtype": "checkbox" },
"delta": [{ "insert": "Customizable" }]
},
{
"type": "text",
"attributes": { "checkbox": true, "subtype": "checkbox" },
"delta": [{ "insert": "Test-covered" }]
},
{
"type": "text",
"attributes": { "checkbox": false, "subtype": "checkbox" },
"delta": [{ "insert": "more to come!" }]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"attributes": { "subtype": "quote" },
"delta": [{ "insert": "Here is an example you can give a try" }]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"delta": [
{ "insert": "You can also use " },
{
"insert": "AppFlowy Editor",
"attributes": {
"italic": true,
"bold": true,
"backgroundColor": "0x6000BCF0"
}
},
{ "insert": " as a component to build your own app." }
]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"attributes": { "subtype": "bulleted-list" },
"delta": [{ "insert": "Use / to insert blocks" }]
},
{
"type": "text",
"attributes": { "subtype": "bulleted-list" },
"delta": [
{
"insert": "Select text to trigger to the toolbar to format your notes."
}
]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"delta": [
{
"insert": "If you have questions or feedback, please submit an issue on Github or join the community along with 1000+ builders!"
}
]
}
]
}
}
"#;
}

View File

@ -0,0 +1,92 @@
use crate::editor::document::{Document, DocumentRevisionSerde};
use crate::editor::queue::{Command, CommandSender, DocumentQueue};
use crate::{DocumentEditor, DocumentUser};
use bytes::Bytes;
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_revision::{RevisionCloudService, RevisionManager};
use flowy_sync::entities::ws_data::ServerRevisionWSData;
use lib_infra::future::FutureResult;
use lib_ot::core::Transaction;
use lib_ws::WSConnectState;
use std::any::Any;
use std::sync::Arc;
use tokio::sync::{mpsc, oneshot};
/// Document editor backed by the new AppFlowy document model. All edits are
/// funneled as `Command`s through a channel to a background `DocumentQueue`,
/// which owns the document state.
pub struct AppFlowyDocumentEditor {
    #[allow(dead_code)]
    doc_id: String,
    command_sender: CommandSender,
}
impl AppFlowyDocumentEditor {
    /// Loads the document identified by `doc_id` through the revision
    /// manager (optionally syncing from `cloud_service`), spawns the
    /// background edit queue, and returns the editor handle.
    pub async fn new(
        doc_id: &str,
        user: Arc<dyn DocumentUser>,
        mut rev_manager: RevisionManager,
        cloud_service: Arc<dyn RevisionCloudService>,
    ) -> FlowyResult<Arc<Self>> {
        let document = rev_manager.load::<DocumentRevisionSerde>(Some(cloud_service)).await?;
        let rev_manager = Arc::new(rev_manager);
        let command_sender = spawn_edit_queue(user, rev_manager, document);
        let doc_id = doc_id.to_string();
        let editor = Arc::new(Self { doc_id, command_sender });
        Ok(editor)
    }

    /// Sends a `ComposeTransaction` command to the edit queue and waits for
    /// the transaction to be applied to the document.
    pub async fn apply_transaction(&self, transaction: Transaction) -> FlowyResult<()> {
        let (ret, rx) = oneshot::channel::<FlowyResult<()>>();
        // Send failures are ignored here; a dropped queue surfaces through
        // the oneshot receive below.
        let _ = self
            .command_sender
            .send(Command::ComposeTransaction { transaction, ret })
            .await;
        let _ = rx.await.map_err(internal_error)??;
        Ok(())
    }

    /// Asks the edit queue for the document's JSON content, pretty-printed
    /// when `pretty` is true.
    pub async fn get_content(&self, pretty: bool) -> FlowyResult<String> {
        let (ret, rx) = oneshot::channel::<FlowyResult<String>>();
        let _ = self
            .command_sender
            .send(Command::GetDocumentContent { pretty, ret })
            .await;
        let content = rx.await.map_err(internal_error)??;
        Ok(content)
    }
}
/// Spawns the background `DocumentQueue` task that owns the document and
/// serially processes editor commands; returns the sending half of its
/// command channel.
fn spawn_edit_queue(
    user: Arc<dyn DocumentUser>,
    rev_manager: Arc<RevisionManager>,
    document: Document,
) -> CommandSender {
    // Bounded channel: senders back-pressure once 1000 commands are queued.
    let (tx, rx) = mpsc::channel(1000);
    tokio::spawn(DocumentQueue::new(user, rev_manager, document, rx).run());
    tx
}
// NOTE(review): this trait impl is still a stub — every method except
// `as_any` panics via `todo!()`. Callers must not route these operations to
// `AppFlowyDocumentEditor` until they are implemented.
impl DocumentEditor for Arc<AppFlowyDocumentEditor> {
    fn get_operations_str(&self) -> FutureResult<String, FlowyError> {
        todo!()
    }

    fn compose_local_operations(&self, _data: Bytes) -> FutureResult<(), FlowyError> {
        todo!()
    }

    fn close(&self) {
        todo!()
    }

    fn receive_ws_data(&self, _data: ServerRevisionWSData) -> FutureResult<(), FlowyError> {
        todo!()
    }

    fn receive_ws_state(&self, _state: &WSConnectState) {
        todo!()
    }

    /// Enables downcasting from `&dyn DocumentEditor` back to this concrete
    /// type.
    fn as_any(&self) -> &dyn Any {
        self
    }
}

View File

@ -0,0 +1,8 @@
// Module layout for the new AppFlowy document editor. `module_inception` is
// allowed because the `editor` module intentionally lives inside `editor/`.
#![allow(clippy::module_inception)]
mod document;
mod document_serde;
mod editor;
mod queue;

pub use document::*;
pub use editor::*;

View File

@ -0,0 +1,78 @@
use crate::editor::document::Document;
use crate::DocumentUser;
use async_stream::stream;
use flowy_error::FlowyError;
use flowy_revision::RevisionManager;
use futures::stream::StreamExt;
use lib_ot::core::Transaction;
use std::sync::Arc;
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::sync::{oneshot, RwLock};
/// Background task owning the (lock-guarded) document state for one editor.
/// Commands arrive over `receiver` and are handled serially by `run`.
pub struct DocumentQueue {
    #[allow(dead_code)]
    user: Arc<dyn DocumentUser>,
    document: Arc<RwLock<Document>>,
    #[allow(dead_code)]
    rev_manager: Arc<RevisionManager>,
    // Option so `run` can take ownership of the receiver exactly once.
    receiver: Option<CommandReceiver>,
}
impl DocumentQueue {
    /// Wraps the freshly loaded `document` in a read/write lock and stores
    /// the command receiver until `run` is called.
    pub fn new(
        user: Arc<dyn DocumentUser>,
        rev_manager: Arc<RevisionManager>,
        document: Document,
        receiver: CommandReceiver,
    ) -> Self {
        let document = Arc::new(RwLock::new(document));
        Self {
            user,
            document,
            rev_manager,
            receiver: Some(receiver),
        }
    }

    /// Consumes the queue and processes commands until the sending side is
    /// dropped. Per-command errors are logged at debug level, not propagated.
    pub async fn run(mut self) {
        let mut receiver = self.receiver.take().expect("Only take once");
        // Adapt the mpsc receiver into a `Stream` so `for_each` can drive it.
        let stream = stream! {
            loop {
                match receiver.recv().await {
                    Some(msg) => yield msg,
                    None => break,
                }
            }
        };
        stream
            .for_each(|command| async {
                match self.handle_command(command).await {
                    Ok(_) => {}
                    Err(e) => tracing::debug!("[DocumentQueue]: {}", e),
                }
            })
            .await;
    }

    /// Dispatches a single command, replying on its oneshot `ret` channel.
    /// A dropped reply receiver is ignored (`let _ = ret.send(..)`).
    async fn handle_command(&self, command: Command) -> Result<(), FlowyError> {
        match command {
            Command::ComposeTransaction { transaction, ret } => {
                self.document.write().await.apply_transaction(transaction)?;
                let _ = ret.send(Ok(()));
            }
            Command::GetDocumentContent { pretty, ret } => {
                let content = self.document.read().await.get_content(pretty)?;
                let _ = ret.send(Ok(content));
            }
        }
        Ok(())
    }
}
pub(crate) type CommandSender = Sender<Command>;
pub(crate) type CommandReceiver = Receiver<Command>;
/// One-shot reply channel carrying a command's result back to the caller.
pub(crate) type Ret<T> = oneshot::Sender<Result<T, FlowyError>>;

/// Commands understood by the `DocumentQueue`.
pub enum Command {
    /// Apply a transaction to the document and acknowledge completion.
    ComposeTransaction { transaction: Transaction, ret: Ret<()> },
    /// Return the document's JSON content, pretty-printed when `pretty`.
    GetDocumentContent { pretty: bool, ret: Ret<String> },
}

View File

@ -9,13 +9,13 @@ pub enum ExportType {
Link = 2,
}
impl std::default::Default for ExportType {
impl Default for ExportType {
fn default() -> Self {
ExportType::Text
}
}
impl std::convert::From<i32> for ExportType {
impl From<i32> for ExportType {
fn from(val: i32) -> Self {
match val {
0 => ExportType::Text,
@ -37,10 +37,6 @@ pub struct EditPayloadPB {
// Encode in JSON format
#[pb(index = 2)]
pub operations: String,
// Encode in JSON format
#[pb(index = 3)]
pub operations_str: String,
}
#[derive(Default)]
@ -49,9 +45,6 @@ pub struct EditParams {
// Encode in JSON format
pub operations: String,
// Encode in JSON format
pub operations_str: String,
}
impl TryInto<EditParams> for EditPayloadPB {
@ -60,7 +53,6 @@ impl TryInto<EditParams> for EditPayloadPB {
Ok(EditParams {
doc_id: self.doc_id,
operations: self.operations,
operations_str: self.operations_str,
})
}
}

View File

@ -12,7 +12,7 @@ pub(crate) async fn get_document_handler(
) -> DataResult<DocumentSnapshotPB, FlowyError> {
let document_id: DocumentIdPB = data.into_inner();
let editor = manager.open_document_editor(&document_id).await?;
let operations_str = editor.get_operation_str().await?;
let operations_str = editor.get_operations_str().await?;
data_result(DocumentSnapshotPB {
doc_id: document_id.into(),
snapshot: operations_str,
@ -35,7 +35,7 @@ pub(crate) async fn export_handler(
) -> DataResult<ExportDataPB, FlowyError> {
let params: ExportParams = data.into_inner().try_into()?;
let editor = manager.open_document_editor(&params.view_id).await?;
let operations_str = editor.get_operation_str().await?;
let operations_str = editor.get_operations_str().await?;
data_result(ExportDataPB {
data: operations_str,
export_type: params.export_type,

View File

@ -1,12 +1,12 @@
pub mod editor;
mod entities;
mod event_handler;
pub mod event_map;
pub mod manager;
mod queue;
mod web_socket;
pub mod editor;
pub mod old_editor;
pub mod protobuf;
pub use manager::*;
pub mod errors {
pub use flowy_error::{internal_error, ErrorCode, FlowyError};

View File

@ -1,6 +1,7 @@
use crate::editor::DocumentRevisionCompactor;
use crate::editor::{initial_document_content, AppFlowyDocumentEditor};
use crate::entities::EditParams;
use crate::{editor::DocumentEditor, errors::FlowyError, DocumentCloudService};
use crate::old_editor::editor::{DocumentRevisionCompress, OldDocumentEditor};
use crate::{errors::FlowyError, DocumentCloudService};
use bytes::Bytes;
use dashmap::DashMap;
use flowy_database::ConnectionPool;
@ -9,12 +10,16 @@ use flowy_revision::disk::SQLiteDocumentRevisionPersistence;
use flowy_revision::{
RevisionCloudService, RevisionManager, RevisionPersistence, RevisionWebSocket, SQLiteRevisionSnapshotPersistence,
};
use flowy_sync::client_document::initial_old_document_content;
use flowy_sync::entities::{
document::{DocumentIdPB, DocumentOperationsPB},
revision::{md5, RepeatedRevision, Revision},
ws_data::ServerRevisionWSData,
};
use lib_infra::future::FutureResult;
use lib_ws::WSConnectState;
use std::any::Any;
use std::{convert::TryInto, sync::Arc};
pub trait DocumentUser: Send + Sync {
@ -24,11 +29,36 @@ pub trait DocumentUser: Send + Sync {
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError>;
}
pub trait DocumentEditor: Send + Sync {
fn get_operations_str(&self) -> FutureResult<String, FlowyError>;
fn compose_local_operations(&self, data: Bytes) -> FutureResult<(), FlowyError>;
fn close(&self);
fn receive_ws_data(&self, data: ServerRevisionWSData) -> FutureResult<(), FlowyError>;
fn receive_ws_state(&self, state: &WSConnectState);
/// Returns the `Any` reference that can be used to downcast back to the original,
/// concrete type.
///
/// The indirection through `as_any` is because using `downcast_ref`
/// on `Box<A>` *directly* only lets us downcast back to `&A` again. You can take a look at [this](https://stackoverflow.com/questions/33687447/how-to-get-a-reference-to-a-concrete-type-from-a-trait-object)
/// for more information.
///
///
fn as_any(&self) -> &dyn Any;
}
#[derive(Clone, Debug)]
pub struct DocumentConfig {
pub use_new_editor: bool,
}
pub struct DocumentManager {
cloud_service: Arc<dyn DocumentCloudService>,
rev_web_socket: Arc<dyn RevisionWebSocket>,
editor_map: Arc<DocumentEditorMap>,
user: Arc<dyn DocumentUser>,
config: DocumentConfig,
}
impl DocumentManager {
@ -36,12 +66,14 @@ impl DocumentManager {
cloud_service: Arc<dyn DocumentCloudService>,
document_user: Arc<dyn DocumentUser>,
rev_web_socket: Arc<dyn RevisionWebSocket>,
config: DocumentConfig,
) -> Self {
Self {
cloud_service,
rev_web_socket,
editor_map: Arc::new(DocumentEditorMap::new()),
user: document_user,
config,
}
}
@ -52,10 +84,13 @@ impl DocumentManager {
}
#[tracing::instrument(level = "trace", skip(self, editor_id), fields(editor_id), err)]
pub async fn open_document_editor<T: AsRef<str>>(&self, editor_id: T) -> Result<Arc<DocumentEditor>, FlowyError> {
pub async fn open_document_editor<T: AsRef<str>>(
&self,
editor_id: T,
) -> Result<Arc<dyn DocumentEditor>, FlowyError> {
let editor_id = editor_id.as_ref();
tracing::Span::current().record("editor_id", &editor_id);
self.get_document_editor(editor_id).await
self.init_document_editor(editor_id).await
}
#[tracing::instrument(level = "trace", skip(self, editor_id), fields(editor_id), err)]
@ -75,7 +110,7 @@ impl DocumentManager {
let _ = editor
.compose_local_operations(Bytes::from(payload.operations_str))
.await?;
let operations_str = editor.get_operation_str().await?;
let operations_str = editor.get_operations_str().await?;
Ok(DocumentOperationsPB {
doc_id: payload.doc_id.clone(),
operations_str,
@ -84,9 +119,7 @@ impl DocumentManager {
pub async fn apply_edit(&self, params: EditParams) -> FlowyResult<()> {
let editor = self.get_document_editor(&params.doc_id).await?;
let _ = editor
.compose_local_operations(Bytes::from(params.operations_str))
.await?;
let _ = editor.compose_local_operations(Bytes::from(params.operations)).await?;
Ok(())
}
@ -114,12 +147,18 @@ impl DocumentManager {
}
}
}
pub fn initial_document_content(&self) -> String {
if self.config.use_new_editor {
initial_document_content()
} else {
initial_old_document_content()
}
}
}
impl DocumentManager {
/// Returns the `DocumentEditor`
/// Initializes the document editor if it's not initialized yet. Otherwise, returns the opened
/// editor.
///
/// # Arguments
///
@ -127,12 +166,9 @@ impl DocumentManager {
///
/// returns: Result<Arc<DocumentEditor>, FlowyError>
///
async fn get_document_editor(&self, doc_id: &str) -> FlowyResult<Arc<DocumentEditor>> {
async fn get_document_editor(&self, doc_id: &str) -> FlowyResult<Arc<dyn DocumentEditor>> {
match self.editor_map.get(doc_id) {
None => {
let db_pool = self.user.db_pool()?;
self.init_document_editor(doc_id, db_pool).await
}
None => self.init_document_editor(doc_id).await,
Some(editor) => Ok(editor),
}
}
@ -146,12 +182,9 @@ impl DocumentManager {
///
/// returns: Result<Arc<DocumentEditor>, FlowyError>
///
#[tracing::instrument(level = "trace", skip(self, pool), err)]
async fn init_document_editor(
&self,
doc_id: &str,
pool: Arc<ConnectionPool>,
) -> Result<Arc<DocumentEditor>, FlowyError> {
#[tracing::instrument(level = "trace", skip(self), err)]
pub async fn init_document_editor(&self, doc_id: &str) -> Result<Arc<dyn DocumentEditor>, FlowyError> {
let pool = self.user.db_pool()?;
let user = self.user.clone();
let token = self.user.token()?;
let rev_manager = self.make_document_rev_manager(doc_id, pool.clone())?;
@ -159,8 +192,16 @@ impl DocumentManager {
token,
server: self.cloud_service.clone(),
});
let editor = DocumentEditor::new(doc_id, user, rev_manager, self.rev_web_socket.clone(), cloud_service).await?;
self.editor_map.insert(doc_id, &editor);
let editor: Arc<dyn DocumentEditor> = if self.config.use_new_editor {
let editor = AppFlowyDocumentEditor::new(doc_id, user, rev_manager, cloud_service).await?;
Arc::new(editor)
} else {
let editor =
OldDocumentEditor::new(doc_id, user, rev_manager, self.rev_web_socket.clone(), cloud_service).await?;
Arc::new(editor)
};
self.editor_map.insert(doc_id, editor.clone());
Ok(editor)
}
@ -174,7 +215,7 @@ impl DocumentManager {
let rev_persistence = RevisionPersistence::new(&user_id, doc_id, disk_cache);
// let history_persistence = SQLiteRevisionHistoryPersistence::new(doc_id, pool.clone());
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(doc_id, pool);
let rev_compactor = DocumentRevisionCompactor();
let rev_compactor = DocumentRevisionCompress();
Ok(RevisionManager::new(
&user_id,
@ -222,7 +263,7 @@ impl RevisionCloudService for DocumentRevisionCloudService {
}
pub struct DocumentEditorMap {
inner: DashMap<String, Arc<DocumentEditor>>,
inner: DashMap<String, Arc<dyn DocumentEditor>>,
}
impl DocumentEditorMap {
@ -230,20 +271,20 @@ impl DocumentEditorMap {
Self { inner: DashMap::new() }
}
pub(crate) fn insert(&self, editor_id: &str, doc: &Arc<DocumentEditor>) {
pub(crate) fn insert(&self, editor_id: &str, editor: Arc<dyn DocumentEditor>) {
if self.inner.contains_key(editor_id) {
log::warn!("Doc:{} already exists in cache", editor_id);
log::warn!("Editor:{} already exists in cache", editor_id);
}
self.inner.insert(editor_id.to_string(), doc.clone());
self.inner.insert(editor_id.to_string(), editor);
}
pub(crate) fn get(&self, editor_id: &str) -> Option<Arc<DocumentEditor>> {
pub(crate) fn get(&self, editor_id: &str) -> Option<Arc<dyn DocumentEditor>> {
Some(self.inner.get(editor_id)?.clone())
}
pub(crate) fn remove(&self, editor_id: &str) {
if let Some(editor) = self.get(editor_id) {
editor.stop()
editor.close()
}
self.inner.remove(editor_id);
}

View File

@ -0,0 +1 @@

View File

@ -1,9 +1,7 @@
use crate::web_socket::EditorCommandSender;
use crate::{
errors::FlowyError,
queue::{EditDocumentQueue, EditorCommand},
DocumentUser,
};
#![allow(unused_attributes)]
#![allow(unused_attributes)]
use crate::old_editor::queue::{EditDocumentQueue, EditorCommand, EditorCommandSender};
use crate::{errors::FlowyError, DocumentEditor, DocumentUser};
use bytes::Bytes;
use flowy_error::{internal_error, FlowyResult};
use flowy_revision::{
@ -16,16 +14,18 @@ use flowy_sync::{
errors::CollaborateResult,
util::make_operations_from_revisions,
};
use lib_infra::future::FutureResult;
use lib_ot::core::{AttributeEntry, AttributeHashMap};
use lib_ot::{
core::{DeltaOperation, Interval},
text_delta::TextOperations,
};
use lib_ws::WSConnectState;
use std::any::Any;
use std::sync::Arc;
use tokio::sync::{mpsc, oneshot};
pub struct DocumentEditor {
pub struct OldDocumentEditor {
pub doc_id: String,
#[allow(dead_code)]
rev_manager: Arc<RevisionManager>,
@ -34,7 +34,7 @@ pub struct DocumentEditor {
edit_cmd_tx: EditorCommandSender,
}
impl DocumentEditor {
impl OldDocumentEditor {
#[allow(unused_variables)]
pub(crate) async fn new(
doc_id: &str,
@ -43,15 +43,17 @@ impl DocumentEditor {
rev_web_socket: Arc<dyn RevisionWebSocket>,
cloud_service: Arc<dyn RevisionCloudService>,
) -> FlowyResult<Arc<Self>> {
let document_info = rev_manager.load::<DocumentRevisionSerde>(Some(cloud_service)).await?;
let operations = TextOperations::from_bytes(&document_info.content)?;
let document = rev_manager
.load::<DeltaDocumentRevisionSerde>(Some(cloud_service))
.await?;
let operations = TextOperations::from_bytes(&document.content)?;
let rev_manager = Arc::new(rev_manager);
let doc_id = doc_id.to_string();
let user_id = user.user_id()?;
let edit_cmd_tx = spawn_edit_queue(user, rev_manager.clone(), operations);
#[cfg(feature = "sync")]
let ws_manager = crate::web_socket::make_document_ws_manager(
let ws_manager = crate::old_editor::web_socket::make_document_ws_manager(
doc_id.clone(),
user_id.clone(),
edit_cmd_tx.clone(),
@ -142,51 +144,60 @@ impl DocumentEditor {
let _ = rx.await.map_err(internal_error)??;
Ok(())
}
}
pub async fn get_operation_str(&self) -> FlowyResult<String> {
impl DocumentEditor for Arc<OldDocumentEditor> {
fn get_operations_str(&self) -> FutureResult<String, FlowyError> {
let (ret, rx) = oneshot::channel::<CollaborateResult<String>>();
let msg = EditorCommand::StringifyOperations { ret };
let _ = self.edit_cmd_tx.send(msg).await;
let json = rx.await.map_err(internal_error)??;
Ok(json)
let msg = EditorCommand::GetOperationsString { ret };
let edit_cmd_tx = self.edit_cmd_tx.clone();
FutureResult::new(async move {
let _ = edit_cmd_tx.send(msg).await;
let json = rx.await.map_err(internal_error)??;
Ok(json)
})
}
#[tracing::instrument(level = "trace", skip(self, data), err)]
pub(crate) async fn compose_local_operations(&self, data: Bytes) -> Result<(), FlowyError> {
let operations = TextOperations::from_bytes(&data)?;
let (ret, rx) = oneshot::channel::<CollaborateResult<()>>();
let msg = EditorCommand::ComposeLocalOperations { operations, ret };
let _ = self.edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
Ok(())
fn compose_local_operations(&self, data: Bytes) -> FutureResult<(), FlowyError> {
let edit_cmd_tx = self.edit_cmd_tx.clone();
FutureResult::new(async move {
let operations = TextOperations::from_bytes(&data)?;
let (ret, rx) = oneshot::channel::<CollaborateResult<()>>();
let msg = EditorCommand::ComposeLocalOperations { operations, ret };
let _ = edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
Ok(())
})
}
#[cfg(feature = "sync")]
pub fn stop(&self) {
fn close(&self) {
#[cfg(feature = "sync")]
self.ws_manager.stop();
}
#[cfg(not(feature = "sync"))]
pub fn stop(&self) {}
#[allow(unused_variables)]
fn receive_ws_data(&self, data: ServerRevisionWSData) -> FutureResult<(), FlowyError> {
let cloned_self = self.clone();
FutureResult::new(async move {
#[cfg(feature = "sync")]
let _ = cloned_self.ws_manager.receive_ws_data(data).await?;
#[cfg(feature = "sync")]
pub(crate) async fn receive_ws_data(&self, data: ServerRevisionWSData) -> Result<(), FlowyError> {
self.ws_manager.receive_ws_data(data).await
}
#[cfg(not(feature = "sync"))]
pub(crate) async fn receive_ws_data(&self, _data: ServerRevisionWSData) -> Result<(), FlowyError> {
Ok(())
Ok(())
})
}
#[cfg(feature = "sync")]
pub(crate) fn receive_ws_state(&self, state: &WSConnectState) {
#[allow(unused_variables)]
fn receive_ws_state(&self, state: &WSConnectState) {
#[cfg(feature = "sync")]
self.ws_manager.connect_state_changed(state.clone());
}
#[cfg(not(feature = "sync"))]
pub(crate) fn receive_ws_state(&self, _state: &WSConnectState) {}
}
impl std::ops::Drop for DocumentEditor {
fn as_any(&self) -> &dyn Any {
self
}
}
impl std::ops::Drop for OldDocumentEditor {
fn drop(&mut self) {
tracing::trace!("{} DocumentEditor was dropped", self.doc_id)
}
@ -214,10 +225,10 @@ fn spawn_edit_queue(
}
#[cfg(feature = "flowy_unit_test")]
impl DocumentEditor {
impl OldDocumentEditor {
pub async fn document_operations(&self) -> FlowyResult<TextOperations> {
let (ret, rx) = oneshot::channel::<CollaborateResult<TextOperations>>();
let msg = EditorCommand::ReadOperations { ret };
let msg = EditorCommand::GetOperations { ret };
let _ = self.edit_cmd_tx.send(msg).await;
let delta = rx.await.map_err(internal_error)??;
Ok(delta)
@ -228,8 +239,8 @@ impl DocumentEditor {
}
}
pub struct DocumentRevisionSerde();
impl RevisionObjectDeserializer for DocumentRevisionSerde {
pub struct DeltaDocumentRevisionSerde();
impl RevisionObjectDeserializer for DeltaDocumentRevisionSerde {
type Output = DocumentPayloadPB;
fn deserialize_revisions(object_id: &str, revisions: Vec<Revision>) -> FlowyResult<Self::Output> {
@ -246,17 +257,17 @@ impl RevisionObjectDeserializer for DocumentRevisionSerde {
}
}
impl RevisionObjectSerializer for DocumentRevisionSerde {
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
impl RevisionObjectSerializer for DeltaDocumentRevisionSerde {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<AttributeHashMap>(revisions)?;
Ok(operations.json_bytes())
}
}
pub(crate) struct DocumentRevisionCompactor();
impl RevisionCompress for DocumentRevisionCompactor {
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
DocumentRevisionSerde::serialize_revisions(revisions)
pub(crate) struct DocumentRevisionCompress();
impl RevisionCompress for DocumentRevisionCompress {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
DeltaDocumentRevisionSerde::combine_revisions(revisions)
}
}

View File

@ -0,0 +1,4 @@
// Legacy (delta-based) document editor modules, kept alongside the new
// tree-based editor during the migration.
pub mod conflict;
pub mod editor;
pub mod queue;
mod web_socket;

View File

@ -1,4 +1,4 @@
use crate::web_socket::{DocumentResolveOperations, EditorCommandReceiver};
use crate::old_editor::web_socket::DocumentResolveOperations;
use crate::DocumentUser;
use async_stream::stream;
use flowy_error::FlowyError;
@ -15,6 +15,7 @@ use lib_ot::{
text_delta::TextOperations,
};
use std::sync::Arc;
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::sync::{oneshot, RwLock};
// The EditorCommandQueue executes each command that will alter the document in
@ -161,11 +162,11 @@ impl EditDocumentQueue {
let _ = self.save_local_operations(operations, md5).await?;
let _ = ret.send(Ok(()));
}
EditorCommand::StringifyOperations { ret } => {
EditorCommand::GetOperationsString { ret } => {
let data = self.document.read().await.get_operations_json();
let _ = ret.send(Ok(data));
}
EditorCommand::ReadOperations { ret } => {
EditorCommand::GetOperations { ret } => {
let operations = self.document.read().await.get_operations().clone();
let _ = ret.send(Ok(operations));
}
@ -184,7 +185,8 @@ impl EditDocumentQueue {
}
pub type TextTransformOperations = TransformOperations<DocumentResolveOperations>;
pub(crate) type EditorCommandSender = Sender<EditorCommand>;
pub(crate) type EditorCommandReceiver = Receiver<EditorCommand>;
pub(crate) type Ret<T> = oneshot::Sender<Result<T, CollaborateError>>;
pub(crate) enum EditorCommand {
@ -235,11 +237,11 @@ pub(crate) enum EditorCommand {
Redo {
ret: Ret<()>,
},
StringifyOperations {
GetOperationsString {
ret: Ret<String>,
},
#[allow(dead_code)]
ReadOperations {
GetOperations {
ret: Ret<TextOperations>,
},
}
@ -259,8 +261,8 @@ impl std::fmt::Debug for EditorCommand {
EditorCommand::CanRedo { .. } => "CanRedo",
EditorCommand::Undo { .. } => "Undo",
EditorCommand::Redo { .. } => "Redo",
EditorCommand::StringifyOperations { .. } => "StringifyOperations",
EditorCommand::ReadOperations { .. } => "ReadOperations",
EditorCommand::GetOperationsString { .. } => "StringifyOperations",
EditorCommand::GetOperations { .. } => "ReadOperations",
};
f.write_str(s)
}

View File

@ -1,9 +1,10 @@
use crate::queue::TextTransformOperations;
use crate::{queue::EditorCommand, TEXT_BLOCK_SYNC_INTERVAL_IN_MILLIS};
use crate::old_editor::queue::{EditorCommand, EditorCommandSender, TextTransformOperations};
use crate::TEXT_BLOCK_SYNC_INTERVAL_IN_MILLIS;
use bytes::Bytes;
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_revision::*;
use flowy_sync::entities::revision::Revision;
use flowy_sync::util::make_operations_from_revisions;
use flowy_sync::{
entities::{
revision::RevisionRange,
@ -12,19 +13,10 @@ use flowy_sync::{
errors::CollaborateResult,
};
use lib_infra::future::{BoxResultFuture, FutureResult};
use flowy_sync::util::make_operations_from_revisions;
use lib_ot::text_delta::TextOperations;
use lib_ws::WSConnectState;
use std::{sync::Arc, time::Duration};
use tokio::sync::{
broadcast,
mpsc::{Receiver, Sender},
oneshot,
};
pub(crate) type EditorCommandSender = Sender<EditorCommand>;
pub(crate) type EditorCommandReceiver = Receiver<EditorCommand>;
use tokio::sync::{broadcast, oneshot};
#[derive(Clone)]
pub struct DocumentResolveOperations(pub TextOperations);

View File

@ -1,2 +0,0 @@
mod script;
mod text_block_test;

View File

@ -1,6 +1,6 @@
#![cfg_attr(rustfmt, rustfmt::skip)]
use crate::editor::{TestBuilder, TestOp::*};
use flowy_sync::client_document::{NewlineDoc, EmptyDoc};
use flowy_sync::client_document::{NewlineDocument, EmptyDocument};
use lib_ot::core::{Interval, OperationTransform, NEW_LINE, WHITESPACE, OTString};
use unicode_segmentation::UnicodeSegmentation;
use lib_ot::text_delta::TextOperations;
@ -19,7 +19,7 @@ fn attributes_bold_added() {
]"#,
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -31,7 +31,7 @@ fn attributes_bold_added_and_invert_all() {
Bold(0, Interval::new(0, 3), false),
AssertDocJson(0, r#"[{"insert":"123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -43,7 +43,7 @@ fn attributes_bold_added_and_invert_partial_suffix() {
Bold(0, Interval::new(2, 4), false),
AssertDocJson(0, r#"[{"insert":"12","attributes":{"bold":true}},{"insert":"34"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -57,7 +57,7 @@ fn attributes_bold_added_and_invert_partial_suffix2() {
Bold(0, Interval::new(2, 4), true),
AssertDocJson(0, r#"[{"insert":"1234","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -85,7 +85,7 @@ fn attributes_bold_added_with_new_line() {
r#"[{"insert":"123","attributes":{"bold":true}},{"insert":"\na\n"},{"insert":"456","attributes":{"bold":true}},{"insert":"\n"}]"#,
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -97,7 +97,7 @@ fn attributes_bold_added_and_invert_partial_prefix() {
Bold(0, Interval::new(0, 2), false),
AssertDocJson(0, r#"[{"insert":"12"},{"insert":"34","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -109,7 +109,7 @@ fn attributes_bold_added_consecutive() {
Bold(0, Interval::new(1, 2), true),
AssertDocJson(0, r#"[{"insert":"12","attributes":{"bold":true}},{"insert":"34"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -128,7 +128,7 @@ fn attributes_bold_added_italic() {
r#"[{"insert":"12345678","attributes":{"bold":true,"italic":true}},{"insert":"\n"}]"#,
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -156,7 +156,7 @@ fn attributes_bold_added_italic2() {
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -193,7 +193,7 @@ fn attributes_bold_added_italic3() {
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -229,7 +229,7 @@ fn attributes_bold_added_italic_delete() {
AssertDocJson(0, r#"[{"insert":"67"},{"insert":"89","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -240,7 +240,7 @@ fn attributes_merge_inserted_text_with_same_attribute() {
InsertBold(0, "456", Interval::new(3, 6)),
AssertDocJson(0, r#"[{"insert":"123456","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -255,7 +255,7 @@ fn attributes_compose_attr_attributes_with_attr_attributes_test() {
AssertDocJson(1, r#"[{"insert":"1234567","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -296,7 +296,7 @@ fn attributes_compose_attr_attributes_with_attr_attributes_test2() {
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -312,7 +312,7 @@ fn attributes_compose_attr_attributes_with_no_attr_attributes_test() {
AssertDocJson(0, expected),
AssertDocJson(1, expected),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -324,7 +324,7 @@ fn attributes_replace_heading() {
AssertDocJson(0, r#"[{"insert":"3456","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -336,7 +336,7 @@ fn attributes_replace_trailing() {
AssertDocJson(0, r#"[{"insert":"12345","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -350,7 +350,7 @@ fn attributes_replace_middle() {
AssertDocJson(0, r#"[{"insert":"34","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -362,7 +362,7 @@ fn attributes_replace_all() {
AssertDocJson(0, r#"[]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -374,7 +374,7 @@ fn attributes_replace_with_text() {
AssertDocJson(0, r#"[{"insert":"ab"},{"insert":"456","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -390,7 +390,7 @@ fn attributes_header_insert_newline_at_middle() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -415,7 +415,7 @@ fn attributes_header_insert_double_newline_at_middle() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -430,7 +430,7 @@ fn attributes_header_insert_newline_at_trailing() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -446,7 +446,7 @@ fn attributes_header_insert_double_newline_at_trailing() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -460,7 +460,7 @@ fn attributes_link_added() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -479,7 +479,7 @@ fn attributes_link_format_with_bold() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -498,7 +498,7 @@ fn attributes_link_insert_char_at_head() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -513,7 +513,7 @@ fn attributes_link_insert_char_at_middle() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -532,7 +532,7 @@ fn attributes_link_insert_char_at_trailing() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -547,7 +547,7 @@ fn attributes_link_insert_newline_at_middle() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -563,7 +563,7 @@ fn attributes_link_auto_format() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -579,7 +579,7 @@ fn attributes_link_auto_format_exist() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -595,7 +595,7 @@ fn attributes_link_auto_format_exist2() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -606,7 +606,7 @@ fn attributes_bullet_added() {
AssertDocJson(0, r#"[{"insert":"12"},{"insert":"\n","attributes":{"list":"bullet"}}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -627,7 +627,7 @@ fn attributes_bullet_added_2() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -644,7 +644,7 @@ fn attributes_bullet_remove_partial() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -660,7 +660,7 @@ fn attributes_bullet_auto_exit() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -700,7 +700,7 @@ fn attributes_preserve_block_when_insert_newline_inside() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -717,7 +717,7 @@ fn attributes_preserve_header_format_on_merge() {
AssertDocJson(0, r#"[{"insert":"123456"},{"insert":"\n","attributes":{"header":1}}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -737,7 +737,7 @@ fn attributes_format_emoji() {
r#"[{"insert":"👋 "},{"insert":"\n","attributes":{"header":1}}]"#,
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -757,7 +757,7 @@ fn attributes_preserve_list_format_on_merge() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -796,5 +796,5 @@ fn delta_compose() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}

View File

@ -1,6 +1,6 @@
#![allow(clippy::all)]
use crate::editor::{Rng, TestBuilder, TestOp::*};
use flowy_sync::client_document::{EmptyDoc, NewlineDoc};
use flowy_sync::client_document::{EmptyDocument, NewlineDocument};
use lib_ot::text_delta::TextOperationBuilder;
use lib_ot::{core::Interval, core::*, text_delta::TextOperations};
@ -11,7 +11,7 @@ fn attributes_insert_text() {
Insert(0, "456", 3),
AssertDocJson(0, r#"[{"insert":"123456"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -21,7 +21,7 @@ fn attributes_insert_text_at_head() {
Insert(0, "456", 0),
AssertDocJson(0, r#"[{"insert":"456123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -31,7 +31,7 @@ fn attributes_insert_text_at_middle() {
Insert(0, "456", 1),
AssertDocJson(0, r#"[{"insert":"145623"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -528,7 +528,7 @@ fn transform_two_plain_delta() {
AssertDocJson(0, r#"[{"insert":"123456"}]"#),
AssertDocJson(1, r#"[{"insert":"123456"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -542,7 +542,7 @@ fn transform_two_plain_delta2() {
AssertDocJson(0, r#"[{"insert":"123456"}]"#),
AssertDocJson(1, r#"[{"insert":"123456"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -560,7 +560,7 @@ fn transform_two_non_seq_delta() {
AssertDocJson(0, r#"[{"insert":"123456"}]"#),
AssertDocJson(1, r#"[{"insert":"123456789"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -575,7 +575,7 @@ fn transform_two_conflict_non_seq_delta() {
AssertDocJson(0, r#"[{"insert":"123456"}]"#),
AssertDocJson(1, r#"[{"insert":"12378456"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -602,7 +602,7 @@ fn delta_invert_no_attribute_delta2() {
Invert(0, 1),
AssertDocJson(0, r#"[{"insert":"123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -615,7 +615,7 @@ fn delta_invert_attribute_delta_with_no_attribute_delta() {
Invert(0, 1),
AssertDocJson(0, r#"[{"insert":"123","attributes":{"bold":true}}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -650,7 +650,7 @@ fn delta_invert_attribute_delta_with_no_attribute_delta2() {
]"#,
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -663,7 +663,7 @@ fn delta_invert_no_attribute_delta_with_attribute_delta() {
Invert(0, 1),
AssertDocJson(0, r#"[{"insert":"123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -682,7 +682,7 @@ fn delta_invert_no_attribute_delta_with_attribute_delta2() {
Invert(0, 1),
AssertDocJson(0, r#"[{"insert":"123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -723,7 +723,7 @@ fn delta_invert_attribute_delta_with_attribute_delta() {
]"#,
),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -733,7 +733,7 @@ fn delta_compose_str() {
Insert(0, "2", 1),
AssertDocJson(0, r#"[{"insert":"12\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -746,5 +746,5 @@ fn delta_compose_with_missing_delta() {
AssertDocJson(0, r#"[{"insert":"1234\n"}]"#),
AssertStr(1, r#"4\n"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}

View File

@ -1,4 +1,4 @@
use flowy_sync::client_document::{ClientDocument, EmptyDoc};
use flowy_sync::client_document::{ClientDocument, EmptyDocument};
use lib_ot::text_delta::TextOperation;
use lib_ot::{
core::*,
@ -101,7 +101,7 @@ fn delta_deserialize_null_test() {
#[test]
fn document_insert_serde_test() {
let mut document = ClientDocument::new::<EmptyDoc>();
let mut document = ClientDocument::new::<EmptyDocument>();
document.insert(0, "\n").unwrap();
document.insert(0, "123").unwrap();
let json = document.get_operations_json();

View File

@ -1,11 +1,11 @@
use crate::editor::{TestBuilder, TestOp::*};
use flowy_sync::client_document::{EmptyDoc, NewlineDoc, RECORD_THRESHOLD};
use flowy_sync::client_document::{EmptyDocument, NewlineDocument, RECORD_THRESHOLD};
use lib_ot::core::{Interval, NEW_LINE, WHITESPACE};
#[test]
fn history_insert_undo() {
let ops = vec![Insert(0, "123", 0), Undo(0), AssertDocJson(0, r#"[{"insert":"\n"}]"#)];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -19,7 +19,7 @@ fn history_insert_undo_with_lagging() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -32,7 +32,7 @@ fn history_insert_redo() {
Redo(0),
AssertDocJson(0, r#"[{"insert":"123\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -51,7 +51,7 @@ fn history_insert_redo_with_lagging() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"123\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -62,7 +62,7 @@ fn history_bold_undo() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -74,7 +74,7 @@ fn history_bold_undo_with_lagging() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"123\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -87,7 +87,7 @@ fn history_bold_redo() {
Redo(0),
AssertDocJson(0, r#" [{"insert":"123","attributes":{"bold":true}},{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -101,7 +101,7 @@ fn history_bold_redo_with_lagging() {
Redo(0),
AssertDocJson(0, r#"[{"insert":"123","attributes":{"bold":true}},{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -115,7 +115,7 @@ fn history_delete_undo() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"123"}]"#),
];
TestBuilder::new().run_scripts::<EmptyDoc>(ops);
TestBuilder::new().run_scripts::<EmptyDocument>(ops);
}
#[test]
@ -134,7 +134,7 @@ fn history_delete_undo_2() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -161,7 +161,7 @@ fn history_delete_undo_with_lagging() {
"#,
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -175,7 +175,7 @@ fn history_delete_redo() {
Redo(0),
AssertDocJson(0, r#"[{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -194,7 +194,7 @@ fn history_replace_undo() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -215,7 +215,7 @@ fn history_replace_undo_with_lagging() {
Undo(0),
AssertDocJson(0, r#"[{"insert":"123","attributes":{"bold":true}},{"insert":"\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -234,7 +234,7 @@ fn history_replace_redo() {
"#,
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -253,7 +253,7 @@ fn history_header_added_undo() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -272,7 +272,7 @@ fn history_link_added_undo() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -291,7 +291,7 @@ fn history_link_auto_format_undo_with_lagging() {
AssertDocJson(0, r#"[{"insert":"https://appflowy.io\n"}]"#),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -314,7 +314,7 @@ fn history_bullet_undo() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -342,7 +342,7 @@ fn history_bullet_undo_with_lagging() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}
#[test]
@ -369,5 +369,5 @@ fn history_undo_attribute_on_merge_between_line() {
),
];
TestBuilder::new().run_scripts::<NewlineDoc>(ops);
TestBuilder::new().run_scripts::<NewlineDocument>(ops);
}

View File

@ -1,2 +1,3 @@
mod document;
mod editor;
mod new_document;
mod old_document;

View File

@ -0,0 +1,2 @@
mod script;
mod test;

View File

@ -0,0 +1,84 @@
use flowy_document::editor::AppFlowyDocumentEditor;
use flowy_test::helper::ViewTest;
use flowy_test::FlowySDKTest;
use lib_ot::core::{Body, Changeset, NodeDataBuilder, NodeOperation, Path, Transaction};
use lib_ot::text_delta::TextOperations;
use std::sync::Arc;
/// A single step in a document-editor test scenario.
///
/// The edit variants mutate the document through the editor; the `Assert*`
/// variants compare the editor's serialized content against an expected
/// JSON string.
pub enum EditScript {
    /// Insert a new "text" node at `path` whose body is the given delta.
    InsertText { path: Path, delta: TextOperations },
    /// Apply `delta` to the text node at `path` (insert/retain/delete ops).
    UpdateText { path: Path, delta: TextOperations },
    /// Remove the node at `path`.
    Delete { path: Path },
    /// Compare the compact JSON content (`get_content(false)`) with `expected`.
    AssertContent { expected: &'static str },
    /// Compare the pretty-printed JSON content (`get_content(true)`) with `expected`.
    AssertPrettyContent { expected: &'static str },
}
/// Test harness owning a test SDK instance and the document editor under test.
pub struct DocumentEditorTest {
    /// Keeps the test SDK (and its backing state) alive for the editor's lifetime.
    pub sdk: FlowySDKTest,
    /// The AppFlowy document editor opened on a freshly created document view.
    pub editor: Arc<AppFlowyDocumentEditor>,
}
impl DocumentEditorTest {
pub async fn new() -> Self {
let sdk = FlowySDKTest::new(true);
let _ = sdk.init_user().await;
let test = ViewTest::new_document_view(&sdk).await;
let document_editor = sdk.document_manager.open_document_editor(&test.view.id).await.unwrap();
let editor = match document_editor.as_any().downcast_ref::<Arc<AppFlowyDocumentEditor>>() {
None => panic!(),
Some(editor) => editor.clone(),
};
Self { sdk, editor }
}
pub async fn run_scripts(&self, scripts: Vec<EditScript>) {
for script in scripts {
self.run_script(script).await;
}
}
async fn run_script(&self, script: EditScript) {
match script {
EditScript::InsertText { path, delta } => {
let node_data = NodeDataBuilder::new("text").insert_body(Body::Delta(delta)).build();
let operation = NodeOperation::Insert {
path,
nodes: vec![node_data],
};
self.editor
.apply_transaction(Transaction::from_operations(vec![operation]))
.await
.unwrap();
}
EditScript::UpdateText { path, delta } => {
let inverted = delta.invert_str("");
let changeset = Changeset::Delta { delta, inverted };
let operation = NodeOperation::Update { path, changeset };
self.editor
.apply_transaction(Transaction::from_operations(vec![operation]))
.await
.unwrap();
}
EditScript::Delete { path } => {
let operation = NodeOperation::Delete { path, nodes: vec![] };
self.editor
.apply_transaction(Transaction::from_operations(vec![operation]))
.await
.unwrap();
}
EditScript::AssertContent { expected } => {
//
let content = self.editor.get_content(false).await.unwrap();
assert_eq!(content, expected);
}
EditScript::AssertPrettyContent { expected } => {
//
let content = self.editor.get_content(true).await.unwrap();
assert_eq!(content, expected);
}
}
}
}

View File

@ -0,0 +1,156 @@
use crate::new_document::script::DocumentEditorTest;
use crate::new_document::script::EditScript::*;
use lib_ot::text_delta::TextOperationBuilder;
#[tokio::test]
async fn document_initialize_test() {
    // A freshly created document contains a single empty "text" node under
    // the root "editor" node.
    let scripts = vec![AssertContent {
        expected: r#"{"document":{"type":"editor","children":[{"type":"text"}]}}"#,
    }];
    DocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
async fn document_insert_text_test() {
    // Build a delta that inserts plain text.
    let delta = TextOperationBuilder::new().insert("Hello world").build();
    // The new node is inserted at path [0, 0], i.e. before the document's
    // initial empty text node, which remains as the second child.
    let expected = r#"{
"document": {
"type": "editor",
"children": [
{
"type": "text",
"delta": [
{
"insert": "Hello world"
}
]
},
{
"type": "text"
}
]
}
}"#;
    let scripts = vec![
        InsertText {
            path: vec![0, 0].into(),
            delta,
        },
        AssertPrettyContent { expected },
    ];
    DocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
async fn document_update_text_test() {
    let test = DocumentEditorTest::new().await;
    let hello_world = "Hello world".to_string();
    // First pass: updating the initial empty text node inserts the text.
    let scripts = vec![
        UpdateText {
            path: vec![0, 0].into(),
            delta: TextOperationBuilder::new().insert(&hello_world).build(),
        },
        AssertPrettyContent {
            expected: r#"{
"document": {
"type": "editor",
"children": [
{
"type": "text",
"delta": [
{
"insert": "Hello world"
}
]
}
]
}
}"#,
        },
    ];
    test.run_scripts(scripts).await;
    // Second pass: retain the existing text and append ", AppFlowy" — the
    // two deltas compose into a single insert in the serialized output.
    let scripts = vec![
        UpdateText {
            path: vec![0, 0].into(),
            delta: TextOperationBuilder::new()
                .retain(hello_world.len())
                .insert(", AppFlowy")
                .build(),
        },
        AssertPrettyContent {
            expected: r#"{
"document": {
"type": "editor",
"children": [
{
"type": "text",
"delta": [
{
"insert": "Hello world, AppFlowy"
}
]
}
]
}
}"#,
        },
    ];
    test.run_scripts(scripts).await;
}
#[tokio::test]
async fn document_delete_text_test() {
    // Insert "Hello world", then apply retain(5) + delete(6) to remove
    // " world", leaving just "Hello".
    let expected = r#"{
"document": {
"type": "editor",
"children": [
{
"type": "text",
"delta": [
{
"insert": "Hello"
}
]
}
]
}
}"#;
    let hello_world = "Hello world".to_string();
    let scripts = vec![
        UpdateText {
            path: vec![0, 0].into(),
            delta: TextOperationBuilder::new().insert(&hello_world).build(),
        },
        UpdateText {
            path: vec![0, 0].into(),
            delta: TextOperationBuilder::new().retain(5).delete(6).build(),
        },
        AssertPrettyContent { expected },
    ];
    DocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
async fn document_delete_node_test() {
    let scripts = vec![
        // Give the initial text node some content first.
        UpdateText {
            path: vec![0, 0].into(),
            delta: TextOperationBuilder::new().insert("Hello world").build(),
        },
        AssertContent {
            expected: r#"{"document":{"type":"editor","children":[{"type":"text","delta":[{"insert":"Hello world"}]}]}}"#,
        },
        // Deleting the only child drops the "children" array from the
        // serialized document entirely.
        Delete {
            path: vec![0, 0].into(),
        },
        AssertContent {
            expected: r#"{"document":{"type":"editor"}}"#,
        },
    ];
    DocumentEditorTest::new().await.run_scripts(scripts).await;
}

View File

@ -0,0 +1,2 @@
mod old_document_test;
mod script;

View File

@ -1,4 +1,4 @@
use crate::document::script::{EditorScript::*, *};
use crate::old_document::script::{EditorScript::*, *};
use flowy_revision::disk::RevisionState;
use lib_ot::core::{count_utf16_code_units, Interval};
@ -14,7 +14,7 @@ async fn text_block_sync_current_rev_id_check() {
AssertNextSyncRevId(None),
AssertJson(r#"[{"insert":"123\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -28,7 +28,7 @@ async fn text_block_sync_state_check() {
AssertRevisionState(3, RevisionState::Ack),
AssertJson(r#"[{"insert":"123\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -40,7 +40,7 @@ async fn text_block_sync_insert_test() {
AssertJson(r#"[{"insert":"123\n"}]"#),
AssertNextSyncRevId(None),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -52,7 +52,7 @@ async fn text_block_sync_insert_in_chinese() {
InsertText("", offset),
AssertJson(r#"[{"insert":"你好\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -64,7 +64,7 @@ async fn text_block_sync_insert_with_emoji() {
InsertText("☺️", offset),
AssertJson(r#"[{"insert":"😁☺️\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -76,7 +76,7 @@ async fn text_block_sync_delete_in_english() {
Delete(Interval::new(0, 2)),
AssertJson(r#"[{"insert":"3\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -89,7 +89,7 @@ async fn text_block_sync_delete_in_chinese() {
Delete(Interval::new(0, offset)),
AssertJson(r#"[{"insert":"好\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}
#[tokio::test]
@ -101,5 +101,5 @@ async fn text_block_sync_replace_test() {
Replace(Interval::new(0, 3), "abc"),
AssertJson(r#"[{"insert":"abc\n"}]"#),
];
DocumentEditorTest::new().await.run_scripts(scripts).await;
OldDocumentEditorTest::new().await.run_scripts(scripts).await;
}

View File

@ -1,4 +1,4 @@
use flowy_document::editor::DocumentEditor;
use flowy_document::old_editor::editor::OldDocumentEditor;
use flowy_document::TEXT_BLOCK_SYNC_INTERVAL_IN_MILLIS;
use flowy_revision::disk::RevisionState;
use flowy_test::{helper::ViewTest, FlowySDKTest};
@ -17,21 +17,21 @@ pub enum EditorScript {
AssertJson(&'static str),
}
pub struct DocumentEditorTest {
pub struct OldDocumentEditorTest {
pub sdk: FlowySDKTest,
pub editor: Arc<DocumentEditor>,
pub editor: Arc<OldDocumentEditor>,
}
impl DocumentEditorTest {
impl OldDocumentEditorTest {
pub async fn new() -> Self {
let sdk = FlowySDKTest::default();
let _ = sdk.init_user().await;
let test = ViewTest::new_text_block_view(&sdk).await;
let editor = sdk
.text_block_manager
.open_document_editor(&test.view.id)
.await
.unwrap();
let test = ViewTest::new_document_view(&sdk).await;
let document_editor = sdk.document_manager.open_document_editor(&test.view.id).await.unwrap();
let editor = match document_editor.as_any().downcast_ref::<Arc<OldDocumentEditor>>() {
None => panic!(),
Some(editor) => editor.clone(),
};
Self { sdk, editor }
}

View File

@ -67,6 +67,7 @@ impl FlowyError {
static_flowy_error!(text_too_long, ErrorCode::TextTooLong);
static_flowy_error!(invalid_data, ErrorCode::InvalidData);
static_flowy_error!(out_of_bounds, ErrorCode::OutOfBounds);
static_flowy_error!(serde, ErrorCode::Serde);
}
impl std::convert::From<ErrorCode> for FlowyError {

View File

@ -1,6 +1,6 @@
use crate::entities::view::ViewDataTypePB;
use crate::entities::ViewLayoutTypePB;
use crate::services::folder_editor::FolderRevisionCompactor;
use crate::services::folder_editor::FolderRevisionCompress;
use crate::{
dart_notification::{send_dart_notification, FolderNotification},
entities::workspace::RepeatedWorkspacePB,
@ -16,7 +16,8 @@ use flowy_error::FlowyError;
use flowy_folder_data_model::user_default;
use flowy_revision::disk::SQLiteDocumentRevisionPersistence;
use flowy_revision::{RevisionManager, RevisionPersistence, RevisionWebSocket, SQLiteRevisionSnapshotPersistence};
use flowy_sync::client_document::default::{initial_document_str, initial_read_me};
use flowy_sync::client_document::default::initial_read_me;
use flowy_sync::{client_folder::FolderPad, entities::ws_data::ServerRevisionWSData};
use lazy_static::lazy_static;
use lib_infra::future::FutureResult;
@ -166,7 +167,7 @@ impl FolderManager {
let object_id = folder_id.as_ref();
let disk_cache = SQLiteDocumentRevisionPersistence::new(user_id, pool.clone());
let rev_persistence = RevisionPersistence::new(user_id, object_id, disk_cache);
let rev_compactor = FolderRevisionCompactor();
let rev_compactor = FolderRevisionCompress();
// let history_persistence = SQLiteRevisionHistoryPersistence::new(object_id, pool.clone());
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(object_id, pool);
let rev_manager = RevisionManager::new(
@ -215,16 +216,14 @@ impl DefaultFolderBuilder {
set_current_workspace(&workspace_rev.id);
for app in workspace_rev.apps.iter() {
for (index, view) in app.belongings.iter().enumerate() {
let view_data = if index == 0 {
initial_read_me().json_str()
} else {
initial_document_str()
};
let _ = view_controller.set_latest_view(&view.id);
let layout_type = ViewLayoutTypePB::from(view.layout.clone());
let _ = view_controller
.create_view(&view.id, ViewDataTypePB::Text, layout_type, Bytes::from(view_data))
.await?;
if index == 0 {
let view_data = initial_read_me().json_str();
let _ = view_controller.set_latest_view(&view.id);
let layout_type = ViewLayoutTypePB::from(view.layout.clone());
let _ = view_controller
.create_view(&view.id, ViewDataTypePB::Text, layout_type, Bytes::from(view_data))
.await?;
}
}
}
let folder = FolderPad::new(vec![workspace_rev.clone()], vec![])?;

View File

@ -112,16 +112,16 @@ impl RevisionObjectDeserializer for FolderRevisionSerde {
}
impl RevisionObjectSerializer for FolderRevisionSerde {
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
}
pub struct FolderRevisionCompactor();
impl RevisionCompress for FolderRevisionCompactor {
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
FolderRevisionSerde::serialize_revisions(revisions)
pub struct FolderRevisionCompress();
impl RevisionCompress for FolderRevisionCompress {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
FolderRevisionSerde::combine_revisions(revisions)
}
}

View File

@ -1,6 +1,6 @@
use crate::entities::GridLayout;
use crate::services::block_editor::GridBlockRevisionCompactor;
use crate::services::grid_editor::{GridRevisionCompactor, GridRevisionEditor};
use crate::services::block_editor::GridBlockRevisionCompress;
use crate::services::grid_editor::{GridRevisionCompress, GridRevisionEditor};
use crate::services::grid_view_manager::make_grid_view_rev_manager;
use crate::services::persistence::block_index::BlockIndexCache;
use crate::services::persistence::kv::GridKVPersistence;
@ -158,7 +158,7 @@ impl GridManager {
let disk_cache = SQLiteGridRevisionPersistence::new(&user_id, pool.clone());
let rev_persistence = RevisionPersistence::new(&user_id, grid_id, disk_cache);
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(grid_id, pool);
let rev_compactor = GridRevisionCompactor();
let rev_compactor = GridRevisionCompress();
let rev_manager = RevisionManager::new(&user_id, grid_id, rev_persistence, rev_compactor, snapshot_persistence);
Ok(rev_manager)
}
@ -167,7 +167,7 @@ impl GridManager {
let user_id = self.grid_user.user_id()?;
let disk_cache = SQLiteGridBlockRevisionPersistence::new(&user_id, pool.clone());
let rev_persistence = RevisionPersistence::new(&user_id, block_id, disk_cache);
let rev_compactor = GridBlockRevisionCompactor();
let rev_compactor = GridBlockRevisionCompress();
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(block_id, pool);
let rev_manager =
RevisionManager::new(&user_id, block_id, rev_persistence, rev_compactor, snapshot_persistence);

View File

@ -204,15 +204,15 @@ impl RevisionObjectDeserializer for GridBlockRevisionSerde {
}
impl RevisionObjectSerializer for GridBlockRevisionSerde {
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
}
pub struct GridBlockRevisionCompactor();
impl RevisionCompress for GridBlockRevisionCompactor {
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridBlockRevisionSerde::serialize_revisions(revisions)
pub struct GridBlockRevisionCompress();
impl RevisionCompress for GridBlockRevisionCompress {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridBlockRevisionSerde::combine_revisions(revisions)
}
}

View File

@ -1,7 +1,7 @@
use crate::dart_notification::{send_dart_notification, GridNotification};
use crate::entities::{CellChangesetPB, GridBlockChangesetPB, InsertedRowPB, RowPB};
use crate::manager::GridUser;
use crate::services::block_editor::{GridBlockRevisionCompactor, GridBlockRevisionEditor};
use crate::services::block_editor::{GridBlockRevisionCompress, GridBlockRevisionEditor};
use crate::services::persistence::block_index::BlockIndexCache;
use crate::services::row::{block_from_row_orders, make_row_from_row_rev, GridBlockSnapshot};
use dashmap::DashMap;
@ -274,7 +274,7 @@ async fn make_block_editor(user: &Arc<dyn GridUser>, block_id: &str) -> FlowyRes
let disk_cache = SQLiteGridBlockRevisionPersistence::new(&user_id, pool.clone());
let rev_persistence = RevisionPersistence::new(&user_id, block_id, disk_cache);
let rev_compactor = GridBlockRevisionCompactor();
let rev_compactor = GridBlockRevisionCompress();
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(block_id, pool);
let rev_manager = RevisionManager::new(&user_id, block_id, rev_persistence, rev_compactor, snapshot_persistence);
GridBlockRevisionEditor::new(&user_id, &token, block_id, rev_manager).await

View File

@ -842,7 +842,7 @@ impl RevisionObjectDeserializer for GridRevisionSerde {
}
}
impl RevisionObjectSerializer for GridRevisionSerde {
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
@ -859,11 +859,11 @@ impl RevisionCloudService for GridRevisionCloudService {
}
}
pub struct GridRevisionCompactor();
pub struct GridRevisionCompress();
impl RevisionCompress for GridRevisionCompactor {
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridRevisionSerde::serialize_revisions(revisions)
impl RevisionCompress for GridRevisionCompress {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridRevisionSerde::combine_revisions(revisions)
}
}

View File

@ -488,16 +488,16 @@ impl RevisionObjectDeserializer for GridViewRevisionSerde {
}
impl RevisionObjectSerializer for GridViewRevisionSerde {
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
}
pub struct GridViewRevisionCompactor();
impl RevisionCompress for GridViewRevisionCompactor {
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridViewRevisionSerde::serialize_revisions(revisions)
pub struct GridViewRevisionCompress();
impl RevisionCompress for GridViewRevisionCompress {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
GridViewRevisionSerde::combine_revisions(revisions)
}
}

View File

@ -4,7 +4,7 @@ use crate::entities::{
};
use crate::manager::GridUser;
use crate::services::grid_editor_task::GridServiceTaskScheduler;
use crate::services::grid_view_editor::{GridViewRevisionCompactor, GridViewRevisionEditor};
use crate::services::grid_view_editor::{GridViewRevisionCompress, GridViewRevisionEditor};
use dashmap::DashMap;
use flowy_error::FlowyResult;
@ -250,7 +250,7 @@ pub async fn make_grid_view_rev_manager(user: &Arc<dyn GridUser>, view_id: &str)
let disk_cache = SQLiteGridViewRevisionPersistence::new(&user_id, pool.clone());
let rev_persistence = RevisionPersistence::new(&user_id, view_id, disk_cache);
let rev_compactor = GridViewRevisionCompactor();
let rev_compactor = GridViewRevisionCompress();
let snapshot_persistence = SQLiteRevisionSnapshotPersistence::new(view_id, pool);
Ok(RevisionManager::new(

View File

@ -4,7 +4,6 @@ use bytes::Bytes;
use flowy_error::{internal_error, FlowyError};
use flowy_folder::event_map::FolderCouldServiceV1;
use flowy_sync::{
client_document::default::initial_document_str,
entities::{
document::{CreateDocumentParams, DocumentIdPB, DocumentPayloadPB, ResetDocumentParams},
ws_data::{ClientRevisionWSData, ClientRevisionWSDataType},
@ -422,15 +421,9 @@ impl DocumentCloudService for LocalServer {
fn fetch_document(
&self,
_token: &str,
params: DocumentIdPB,
_params: DocumentIdPB,
) -> FutureResult<Option<DocumentPayloadPB>, FlowyError> {
let doc = DocumentPayloadPB {
doc_id: params.value,
content: initial_document_str(),
rev_id: 0,
base_rev_id: 0,
};
FutureResult::new(async { Ok(Some(doc)) })
FutureResult::new(async { Ok(None) })
}
fn update_document_content(&self, _token: &str, _params: ResetDocumentParams) -> FutureResult<(), FlowyError> {

View File

@ -33,11 +33,11 @@ pub trait RevisionObjectDeserializer: Send + Sync {
}
pub trait RevisionObjectSerializer: Send + Sync {
/// Serialize the list of revisions to `Bytes`
/// Serialize a list of revisions into one in `Bytes` format
///
/// * `revisions`: a list of revisions will be serialized to `Bytes`
///
fn serialize_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes>;
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes>;
}
/// `RevisionCompress` is used to compress multiple revisions into one revision
@ -62,11 +62,11 @@ pub trait RevisionCompress: Send + Sync {
let (base_rev_id, rev_id) = first_revision.pair_rev_id();
let md5 = last_revision.md5.clone();
let bytes = self.serialize_revisions(revisions)?;
let bytes = self.combine_revisions(revisions)?;
Ok(Revision::new(object_id, base_rev_id, rev_id, bytes, user_id, md5))
}
fn serialize_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes>;
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes>;
}
pub struct RevisionManager {

View File

@ -2,7 +2,7 @@ use bytes::Bytes;
use flowy_database::ConnectionPool;
use flowy_document::{
errors::{internal_error, FlowyError},
DocumentCloudService, DocumentManager, DocumentUser,
DocumentCloudService, DocumentConfig, DocumentManager, DocumentUser,
};
use flowy_net::ClientServerConfiguration;
use flowy_net::{
@ -23,6 +23,7 @@ impl DocumentDepsResolver {
ws_conn: Arc<FlowyWebSocketConnect>,
user_session: Arc<UserSession>,
server_config: &ClientServerConfiguration,
document_config: &DocumentConfig,
) -> Arc<DocumentManager> {
let user = Arc::new(BlockUserImpl(user_session));
let rev_web_socket = Arc::new(DocumentRevisionWebSocket(ws_conn.clone()));
@ -31,7 +32,12 @@ impl DocumentDepsResolver {
Some(local_server) => local_server,
};
let manager = Arc::new(DocumentManager::new(cloud_service, user, rev_web_socket));
let manager = Arc::new(DocumentManager::new(
cloud_service,
user,
rev_web_socket,
document_config.clone(),
));
let receiver = Arc::new(DocumentWSMessageReceiverImpl(manager.clone()));
ws_conn.add_ws_message_receiver(receiver).unwrap();

View File

@ -17,7 +17,6 @@ use flowy_net::{
http_server::folder::FolderHttpCloudService, local_server::LocalServer, ws::connection::FlowyWebSocketConnect,
};
use flowy_revision::{RevisionWebSocket, WSStateReceiver};
use flowy_sync::client_document::default::initial_document_str;
use flowy_sync::entities::revision::{RepeatedRevision, Revision};
use flowy_sync::entities::ws_data::ClientRevisionWSData;
use flowy_user::services::UserSession;
@ -175,7 +174,7 @@ impl ViewDataProcessor for DocumentViewDataProcessor {
let manager = self.0.clone();
FutureResult::new(async move {
let editor = manager.open_document_editor(view_id).await?;
let delta_bytes = Bytes::from(editor.get_operation_str().await?);
let delta_bytes = Bytes::from(editor.get_operations_str().await?);
Ok(delta_bytes)
})
}
@ -190,8 +189,8 @@ impl ViewDataProcessor for DocumentViewDataProcessor {
let user_id = user_id.to_string();
let view_id = view_id.to_string();
let manager = self.0.clone();
let view_data = self.0.initial_document_content();
FutureResult::new(async move {
let view_data = initial_document_str();
let delta_data = Bytes::from(view_data);
let repeated_revision: RepeatedRevision =
Revision::initial_revision(&user_id, &view_id, delta_data.clone()).into();

View File

@ -3,7 +3,7 @@ pub mod module;
pub use flowy_net::get_client_server_configuration;
use crate::deps_resolve::*;
use flowy_document::DocumentManager;
use flowy_document::{DocumentConfig, DocumentManager};
use flowy_folder::{errors::FlowyError, manager::FolderManager};
use flowy_grid::manager::GridManager;
use flowy_net::ClientServerConfiguration;
@ -34,24 +34,28 @@ pub struct FlowySDKConfig {
root: String,
log_filter: String,
server_config: ClientServerConfiguration,
document_config: DocumentConfig,
}
impl fmt::Debug for FlowySDKConfig {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("FlowySDKConfig")
.field("root", &self.root)
.field("server_config", &self.server_config)
.field("server-config", &self.server_config)
.field("document-config", &self.document_config)
.finish()
}
}
impl FlowySDKConfig {
pub fn new(root: &str, server_config: ClientServerConfiguration, name: &str) -> Self {
pub fn new(root: &str, name: &str, server_config: ClientServerConfiguration, use_new_editor: bool) -> Self {
let document_config = DocumentConfig { use_new_editor };
FlowySDKConfig {
name: name.to_owned(),
root: root.to_owned(),
log_filter: crate_log_filter("info".to_owned()),
server_config,
document_config,
}
}
@ -89,7 +93,7 @@ pub struct FlowySDK {
#[allow(dead_code)]
config: FlowySDKConfig,
pub user_session: Arc<UserSession>,
pub text_block_manager: Arc<DocumentManager>,
pub document_manager: Arc<DocumentManager>,
pub folder_manager: Arc<FolderManager>,
pub grid_manager: Arc<GridManager>,
pub dispatcher: Arc<EventDispatcher>,
@ -106,11 +110,12 @@ impl FlowySDK {
let (local_server, ws_conn) = mk_local_server(&config.server_config);
let (user_session, text_block_manager, folder_manager, local_server, grid_manager) = runtime.block_on(async {
let user_session = mk_user_session(&config, &local_server, &config.server_config);
let text_block_manager = DocumentDepsResolver::resolve(
let document_manager = DocumentDepsResolver::resolve(
local_server.clone(),
ws_conn.clone(),
user_session.clone(),
&config.server_config,
&config.document_config,
);
let grid_manager = GridDepsResolver::resolve(ws_conn.clone(), user_session.clone()).await;
@ -120,7 +125,7 @@ impl FlowySDK {
user_session.clone(),
&config.server_config,
&ws_conn,
&text_block_manager,
&document_manager,
&grid_manager,
)
.await;
@ -131,7 +136,7 @@ impl FlowySDK {
ws_conn.init().await;
(
user_session,
text_block_manager,
document_manager,
folder_manager,
local_server,
grid_manager,
@ -153,7 +158,7 @@ impl FlowySDK {
Self {
config,
user_session,
text_block_manager,
document_manager: text_block_manager,
folder_manager,
grid_manager,
dispatcher,

View File

@ -10,6 +10,7 @@ flowy-sdk = { path = "../flowy-sdk", default-features = false }
flowy-user = { path = "../flowy-user"}
flowy-net = { path = "../flowy-net"}
flowy-folder = { path = "../flowy-folder", default-features = false}
flowy-document= { path = "../flowy-document", default-features = false}
lib-dispatch = { path = "../lib-dispatch" }
flowy-sync = { path = "../../../shared-lib/flowy-sync" }

View File

@ -46,7 +46,7 @@ impl ViewTest {
Self::new(sdk, ViewDataTypePB::Database, ViewLayoutTypePB::Board, data).await
}
pub async fn new_text_block_view(sdk: &FlowySDKTest) -> Self {
pub async fn new_document_view(sdk: &FlowySDKTest) -> Self {
Self::new(sdk, ViewDataTypePB::Text, ViewLayoutTypePB::Document, vec![]).await
}
}

View File

@ -2,7 +2,8 @@ pub mod event_builder;
pub mod helper;
use crate::helper::*;
use flowy_net::{get_client_server_configuration, ClientServerConfiguration};
use flowy_net::get_client_server_configuration;
use flowy_sdk::{FlowySDK, FlowySDKConfig};
use flowy_user::entities::UserProfilePB;
use nanoid::nanoid;
@ -27,16 +28,14 @@ impl std::ops::Deref for FlowySDKTest {
impl std::default::Default for FlowySDKTest {
fn default() -> Self {
let server_config = get_client_server_configuration().unwrap();
let sdk = Self::new(server_config);
std::mem::forget(sdk.dispatcher());
sdk
Self::new(false)
}
}
impl FlowySDKTest {
pub fn new(server_config: ClientServerConfiguration) -> Self {
let config = FlowySDKConfig::new(&root_dir(), server_config, &nanoid!(6)).log_filter("info");
pub fn new(use_new_editor: bool) -> Self {
let server_config = get_client_server_configuration().unwrap();
let config = FlowySDKConfig::new(&root_dir(), &nanoid!(6), server_config, use_new_editor).log_filter("info");
let sdk = std::thread::spawn(|| FlowySDK::new(config)).join().unwrap();
std::mem::forget(sdk.dispatcher());
Self { inner: sdk }

View File

@ -1,3 +1,6 @@
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![allow(unused_assignments)]
use crate::{attr, ty_ext::*, AttrsContainer, Ctxt};
use syn::{self, punctuated::Punctuated};

View File

@ -126,6 +126,9 @@ pub enum ErrorCode {
#[display(fmt = "Invalid data")]
InvalidData = 1000,
#[display(fmt = "Serde")]
Serde = 1001,
#[display(fmt = "Out of bounds")]
OutOfBounds = 10001,
}

View File

@ -77,7 +77,7 @@ where
}
pub fn get_all_objects(&self) -> Vec<Arc<T>> {
self.inner.values().map(|map| map.all_objects()).flatten().collect()
self.inner.values().flat_map(|map| map.all_objects()).collect()
}
/// add object to the end of the list
@ -117,7 +117,7 @@ where
}
pub fn all_objects(&self) -> Vec<Arc<T>> {
self.object_by_field_type.values().cloned().flatten().collect()
self.object_by_field_type.values().flatten().cloned().collect()
}
}

View File

@ -1,14 +1,4 @@
use lib_ot::{core::OperationBuilder, text_delta::TextOperations};
#[inline]
pub fn initial_document_operations() -> TextOperations {
OperationBuilder::new().insert("\n").build()
}
#[inline]
pub fn initial_document_str() -> String {
initial_document_operations().json_str()
}
use lib_ot::text_delta::TextOperations;
#[inline]
pub fn initial_read_me() -> TextOperations {

View File

@ -1,4 +1,3 @@
use crate::client_document::default::initial_document_str;
use crate::{
client_document::{
history::{History, UndoResult},
@ -7,6 +6,7 @@ use crate::{
errors::CollaborateError,
};
use bytes::Bytes;
use lib_ot::text_delta::TextOperationBuilder;
use lib_ot::{core::*, text_delta::TextOperations};
use tokio::sync::mpsc;
@ -14,20 +14,24 @@ pub trait InitialDocument {
fn json_str() -> String;
}
pub struct EmptyDoc();
impl InitialDocument for EmptyDoc {
pub struct EmptyDocument();
impl InitialDocument for EmptyDocument {
fn json_str() -> String {
TextOperations::default().json_str()
}
}
pub struct NewlineDoc();
impl InitialDocument for NewlineDoc {
pub struct NewlineDocument();
impl InitialDocument for NewlineDocument {
fn json_str() -> String {
initial_document_str()
initial_old_document_content()
}
}
pub fn initial_old_document_content() -> String {
TextOperationBuilder::new().insert("\n").build().json_str()
}
pub struct ClientDocument {
operations: TextOperations,
history: History,
@ -206,7 +210,7 @@ impl ClientDocument {
pub fn is_empty(&self) -> bool {
// The document is empty if its text is equal to the initial text.
self.operations.json_str() == NewlineDoc::json_str()
self.operations.json_str() == NewlineDocument::json_str()
}
}

View File

@ -163,7 +163,7 @@ impl GridRevisionPad {
}
Some(field_rev) => {
let mut_field_rev = Arc::make_mut(field_rev);
let old_field_type_rev = mut_field_rev.ty.clone();
let old_field_type_rev = mut_field_rev.ty;
let old_field_type_option = mut_field_rev.get_type_option_str(mut_field_rev.ty);
match mut_field_rev.get_type_option_str(new_field_type) {
Some(new_field_type_option) => {

View File

@ -270,7 +270,7 @@ impl OpenDocumentHandler {
.send(msg)
.await
.map_err(|e| CollaborateError::internal().context(format!("Send document command failed: {}", e)))?;
Ok(rx.await.map_err(internal_error)?)
rx.await.map_err(internal_error)
}
}

View File

@ -241,7 +241,7 @@ impl OpenFolderHandler {
.send(msg)
.await
.map_err(|e| CollaborateError::internal().context(format!("Send folder command failed: {}", e)))?;
Ok(rx.await.map_err(internal_error)?)
rx.await.map_err(internal_error)
}
}

View File

@ -22,8 +22,7 @@ pub fn parse_protobuf_context_from(crate_paths: Vec<String>) -> Vec<ProtobufCrat
let files = crate_info
.proto_input_paths()
.iter()
.map(|proto_crate_path| parse_files_protobuf(proto_crate_path, &proto_output_path))
.flatten()
.flat_map(|proto_crate_path| parse_files_protobuf(proto_crate_path, &proto_output_path))
.collect::<Vec<ProtoFile>>();
ProtobufCrateContext::from_crate_info(crate_info, files)

View File

@ -182,9 +182,9 @@ pub fn check_pb_dart_plugin() {
));
}
msg.push_str(&"✅ You can fix that by adding:".to_string());
msg.push_str(&"\n\texport PATH=\"$PATH\":\"$HOME/.pub-cache/bin\"\n".to_string());
msg.push_str(&"to your shell's config file.(.bashrc, .bash, .profile, .zshrc etc.)".to_string());
msg.push_str("✅ You can fix that by adding:");
msg.push_str("\n\texport PATH=\"$PATH\":\"$HOME/.pub-cache/bin\"\n");
msg.push_str("to your shell's config file.(.bashrc, .bash, .profile, .zshrc etc.)");
panic!("{}", msg)
}
}
@ -198,13 +198,9 @@ fn gen_proto_files(crate_name: &str, crate_path: &str) -> Vec<ProtobufCrate> {
.map(|info| info.protobuf_crate.clone())
.collect::<Vec<_>>();
crate_context
.into_iter()
.map(|info| info.files)
.flatten()
.for_each(|file| {
println!("cargo:rerun-if-changed={}", file.file_path);
});
crate_context.into_iter().flat_map(|info| info.files).for_each(|file| {
println!("cargo:rerun-if-changed={}", file.file_path);
});
proto_crates
}

View File

@ -52,7 +52,7 @@ impl ProtoGenerator {
fn write_proto_files(crate_contexts: &[ProtobufCrateContext]) {
let file_path_content_map = crate_contexts
.iter()
.map(|ctx| {
.flat_map(|ctx| {
ctx.files
.iter()
.map(|file| {
@ -66,7 +66,6 @@ fn write_proto_files(crate_contexts: &[ProtobufCrateContext]) {
})
.collect::<HashMap<String, ProtoFileSymbol>>()
})
.flatten()
.collect::<HashMap<String, ProtoFileSymbol>>();
for context in crate_contexts {
@ -152,12 +151,11 @@ impl ProtoCache {
fn from_crate_contexts(crate_contexts: &[ProtobufCrateContext]) -> Self {
let proto_files = crate_contexts
.iter()
.map(|crate_info| &crate_info.files)
.flatten()
.flat_map(|crate_info| &crate_info.files)
.collect::<Vec<&ProtoFile>>();
let structs: Vec<String> = proto_files.iter().map(|info| info.structs.clone()).flatten().collect();
let enums: Vec<String> = proto_files.iter().map(|info| info.enums.clone()).flatten().collect();
let structs: Vec<String> = proto_files.iter().flat_map(|info| info.structs.clone()).collect();
let enums: Vec<String> = proto_files.iter().flat_map(|info| info.enums.clone()).collect();
Self { structs, enums }
}
}

View File

@ -8,8 +8,6 @@ edition = "2018"
[dependencies]
bytecount = "0.6.0"
serde = { version = "1.0", features = ["derive", "rc"] }
#flowy-revision = { path = "../../frontend/rust-lib/flowy-revision" }
#protobuf = {version = "2.18.0"}
tokio = { version = "1", features = ["sync"] }
dashmap = "5"
md5 = "0.7.0"

View File

@ -48,6 +48,10 @@ impl AttributeHashMap {
AttributeHashMap(HashMap::new())
}
pub fn into_inner(self) -> HashMap<AttributeKey, AttributeValue> {
self.0
}
pub fn from_value(attribute_map: HashMap<AttributeKey, AttributeValue>) -> Self {
Self(attribute_map)
}

View File

@ -84,7 +84,7 @@ where
let map: T = map.next_value()?;
attributes = Some(map);
}
_ => panic!(),
_ => {}
}
}
match operation {

View File

@ -1,6 +1,6 @@
use crate::core::delta::operation::OperationAttributes;
use crate::core::delta::DeltaOperations;
use serde::de::DeserializeOwned;
use serde::{
de::{SeqAccess, Visitor},
ser::SerializeSeq,

View File

@ -1,3 +1,4 @@
use serde::{Deserialize, Serialize};
use std::{
cmp::{max, min},
fmt,
@ -9,7 +10,7 @@ use std::{
///
/// It is an invariant that `start <= end`. An interval where `end < start` is
/// considered empty.
#[derive(Clone, Copy, PartialEq, Eq)]
#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct Interval {
pub start: usize,
pub end: usize,

View File

@ -6,10 +6,15 @@ mod operation;
mod operation_serde;
mod path;
mod transaction;
mod transaction_serde;
mod tree;
mod tree_serde;
pub use node::*;
pub use operation::*;
pub use path::*;
pub use transaction::*;
pub use tree::*;
pub use tree_serde::*;
pub use indextree::NodeId;

View File

@ -1,6 +1,6 @@
use super::node_serde::*;
use crate::core::attributes::{AttributeHashMap, AttributeKey, AttributeValue};
use crate::core::NodeBody::Delta;
use crate::core::Body::Delta;
use crate::core::OperationTransform;
use crate::errors::OTError;
use crate::text_delta::TextOperations;
@ -17,9 +17,9 @@ pub struct NodeData {
#[serde(serialize_with = "serialize_body")]
#[serde(deserialize_with = "deserialize_body")]
#[serde(skip_serializing_if = "NodeBody::is_empty")]
#[serde(skip_serializing_if = "Body::is_empty")]
#[serde(default)]
pub body: NodeBody,
pub body: Body,
#[serde(skip_serializing_if = "Vec::is_empty")]
#[serde(default)]
@ -45,6 +45,9 @@ impl NodeData {
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RepeatedNodeData(Vec<NodeData>);
/// Builder for [`NodeData`]
pub struct NodeDataBuilder {
node: NodeData,
@ -58,7 +61,7 @@ impl NodeDataBuilder {
}
/// Appends a new node to the end of the builder's node children.
pub fn add_node(mut self, node: NodeData) -> Self {
pub fn add_node_data(mut self, node: NodeData) -> Self {
self.node.children.push(node);
self
}
@ -72,7 +75,7 @@ impl NodeDataBuilder {
}
/// Inserts a body to the builder's node
pub fn insert_body(mut self, body: NodeBody) -> Self {
pub fn insert_body(mut self, body: Body) -> Self {
self.node.body = body;
self
}
@ -92,24 +95,24 @@ impl NodeDataBuilder {
/// compose, transform and invert.
///
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum NodeBody {
pub enum Body {
Empty,
Delta(TextOperations),
}
impl std::default::Default for NodeBody {
impl std::default::Default for Body {
fn default() -> Self {
NodeBody::Empty
Body::Empty
}
}
impl NodeBody {
impl Body {
fn is_empty(&self) -> bool {
matches!(self, NodeBody::Empty)
matches!(self, Body::Empty)
}
}
impl OperationTransform for NodeBody {
impl OperationTransform for Body {
/// Only the same enum variant can perform the compose operation.
fn compose(&self, other: &Self) -> Result<Self, OTError>
where
@ -117,7 +120,7 @@ impl OperationTransform for NodeBody {
{
match (self, other) {
(Delta(a), Delta(b)) => a.compose(b).map(Delta),
(NodeBody::Empty, NodeBody::Empty) => Ok(NodeBody::Empty),
(Body::Empty, Body::Empty) => Ok(Body::Empty),
(l, r) => {
let msg = format!("{:?} can not compose {:?}", l, r);
Err(OTError::internal().context(msg))
@ -132,7 +135,7 @@ impl OperationTransform for NodeBody {
{
match (self, other) {
(Delta(l), Delta(r)) => l.transform(r).map(|(ta, tb)| (Delta(ta), Delta(tb))),
(NodeBody::Empty, NodeBody::Empty) => Ok((NodeBody::Empty, NodeBody::Empty)),
(Body::Empty, Body::Empty) => Ok((Body::Empty, Body::Empty)),
(l, r) => {
let msg = format!("{:?} can not compose {:?}", l, r);
Err(OTError::internal().context(msg))
@ -144,7 +147,7 @@ impl OperationTransform for NodeBody {
fn invert(&self, other: &Self) -> Self {
match (self, other) {
(Delta(l), Delta(r)) => Delta(l.invert(r)),
(NodeBody::Empty, NodeBody::Empty) => NodeBody::Empty,
(Body::Empty, Body::Empty) => Body::Empty,
(l, r) => {
tracing::error!("{:?} can not compose {:?}", l, r);
l.clone()
@ -158,20 +161,28 @@ impl OperationTransform for NodeBody {
/// Each NodeBody except the Empty should have its corresponding changeset variant.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum NodeBodyChangeset {
pub enum Changeset {
Delta {
delta: TextOperations,
inverted: TextOperations,
},
Attributes {
new: AttributeHashMap,
old: AttributeHashMap,
},
}
impl NodeBodyChangeset {
pub fn inverted(&self) -> NodeBodyChangeset {
impl Changeset {
pub fn inverted(&self) -> Changeset {
match self {
NodeBodyChangeset::Delta { delta, inverted } => NodeBodyChangeset::Delta {
Changeset::Delta { delta, inverted } => Changeset::Delta {
delta: inverted.clone(),
inverted: delta.clone(),
},
Changeset::Attributes { new, old } => Changeset::Attributes {
new: old.clone(),
old: new.clone(),
},
}
}
}
@ -181,7 +192,7 @@ impl NodeBodyChangeset {
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct Node {
pub node_type: String,
pub body: NodeBody,
pub body: Body,
pub attributes: AttributeHashMap,
}
@ -190,16 +201,22 @@ impl Node {
Node {
node_type: node_type.into(),
attributes: AttributeHashMap::new(),
body: NodeBody::Empty,
body: Body::Empty,
}
}
pub fn apply_body_changeset(&mut self, changeset: NodeBodyChangeset) {
pub fn apply_changeset(&mut self, changeset: Changeset) -> Result<(), OTError> {
match changeset {
NodeBodyChangeset::Delta { delta, inverted: _ } => match self.body.compose(&Delta(delta)) {
Ok(new_body) => self.body = new_body,
Err(e) => tracing::error!("{:?}", e),
},
Changeset::Delta { delta, inverted: _ } => {
let new_body = self.body.compose(&Delta(delta))?;
self.body = new_body;
Ok(())
}
Changeset::Attributes { new, old: _ } => {
let new_attributes = AttributeHashMap::compose(&self.attributes, &new)?;
self.attributes = new_attributes;
Ok(())
}
}
}
}

View File

@ -1,18 +1,18 @@
use super::NodeBody;
use super::Body;
use crate::text_delta::TextOperations;
use serde::de::{self, MapAccess, Visitor};
use serde::ser::SerializeMap;
use serde::{Deserializer, Serializer};
use std::fmt;
pub fn serialize_body<S>(body: &NodeBody, serializer: S) -> Result<S::Ok, S::Error>
pub fn serialize_body<S>(body: &Body, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map = serializer.serialize_map(Some(3))?;
match body {
NodeBody::Empty => {}
NodeBody::Delta(delta) => {
Body::Empty => {}
Body::Delta(delta) => {
map.serialize_key("delta")?;
map.serialize_value(delta)?;
}
@ -20,31 +20,19 @@ where
map.end()
}
pub fn deserialize_body<'de, D>(deserializer: D) -> Result<NodeBody, D::Error>
pub fn deserialize_body<'de, D>(deserializer: D) -> Result<Body, D::Error>
where
D: Deserializer<'de>,
{
struct NodeBodyVisitor();
impl<'de> Visitor<'de> for NodeBodyVisitor {
type Value = NodeBody;
type Value = Body;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Expect NodeBody")
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: de::SeqAccess<'de>,
{
let mut delta = TextOperations::default();
while let Some(op) = seq.next_element()? {
delta.add(op);
}
Ok(NodeBody::Delta(delta))
}
#[inline]
fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
where
V: MapAccess<'de>,
@ -65,7 +53,7 @@ where
}
if let Some(delta) = delta {
return Ok(NodeBody::Delta(delta));
return Ok(Body::Delta(delta));
}
Err(de::Error::missing_field("delta"))

View File

@ -1,8 +1,8 @@
use crate::core::attributes::AttributeHashMap;
use crate::core::{NodeBodyChangeset, NodeData, Path};
use super::operation_serde::{deserialize_changeset, serialize_changeset};
use crate::core::{Changeset, NodeData, Path};
use crate::errors::OTError;
use serde::{Deserialize, Serialize};
use std::rc::Rc;
use std::sync::Arc;
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "op")]
@ -10,17 +10,10 @@ pub enum NodeOperation {
#[serde(rename = "insert")]
Insert { path: Path, nodes: Vec<NodeData> },
#[serde(rename = "update-attribute")]
UpdateAttributes {
path: Path,
new: AttributeHashMap,
old: AttributeHashMap,
},
#[serde(rename = "update-body")]
// #[serde(serialize_with = "serialize_edit_body")]
// #[serde(deserialize_with = "deserialize_edit_body")]
UpdateBody { path: Path, changeset: NodeBodyChangeset },
#[serde(rename = "update")]
#[serde(serialize_with = "serialize_changeset")]
#[serde(deserialize_with = "deserialize_changeset")]
Update { path: Path, changeset: Changeset },
#[serde(rename = "delete")]
Delete { path: Path, nodes: Vec<NodeData> },
@ -30,18 +23,16 @@ impl NodeOperation {
pub fn get_path(&self) -> &Path {
match self {
NodeOperation::Insert { path, .. } => path,
NodeOperation::UpdateAttributes { path, .. } => path,
NodeOperation::Delete { path, .. } => path,
NodeOperation::UpdateBody { path, .. } => path,
NodeOperation::Update { path, .. } => path,
}
}
pub fn get_mut_path(&mut self) -> &mut Path {
match self {
NodeOperation::Insert { path, .. } => path,
NodeOperation::UpdateAttributes { path, .. } => path,
NodeOperation::Delete { path, .. } => path,
NodeOperation::UpdateBody { path, .. } => path,
NodeOperation::Update { path, .. } => path,
}
}
@ -51,20 +42,11 @@ impl NodeOperation {
path: path.clone(),
nodes: nodes.clone(),
},
NodeOperation::UpdateAttributes {
path,
new: attributes,
old: old_attributes,
} => NodeOperation::UpdateAttributes {
path: path.clone(),
new: old_attributes.clone(),
old: attributes.clone(),
},
NodeOperation::Delete { path, nodes } => NodeOperation::Insert {
path: path.clone(),
nodes: nodes.clone(),
},
NodeOperation::UpdateBody { path, changeset: body } => NodeOperation::UpdateBody {
NodeOperation::Update { path, changeset: body } => NodeOperation::Update {
path: path.clone(),
changeset: body.inverted(),
},
@ -101,7 +83,7 @@ impl NodeOperation {
///
/// op_1.transform(&mut op_2);
/// assert_eq!(serde_json::to_string(&op_2).unwrap(), r#"{"op":"insert","path":[0,2],"nodes":[{"type":"text_2"}]}"#);
///
/// assert_eq!(serde_json::to_string(&op_1).unwrap(), r#"{"op":"insert","path":[0,1],"nodes":[{"type":"text_1"}]}"#);
/// ```
pub fn transform(&self, other: &mut NodeOperation) {
match self {
@ -122,21 +104,21 @@ impl NodeOperation {
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct NodeOperations {
operations: Vec<Rc<NodeOperation>>,
operations: Vec<Arc<NodeOperation>>,
}
impl NodeOperations {
pub fn into_inner(self) -> Vec<Rc<NodeOperation>> {
pub fn into_inner(self) -> Vec<Arc<NodeOperation>> {
self.operations
}
pub fn add_op(&mut self, operation: NodeOperation) {
self.operations.push(Rc::new(operation));
self.operations.push(Arc::new(operation));
}
}
impl std::ops::Deref for NodeOperations {
type Target = Vec<Rc<NodeOperation>>;
type Target = Vec<Arc<NodeOperation>>;
fn deref(&self) -> &Self::Target {
&self.operations
@ -158,7 +140,7 @@ impl std::convert::From<Vec<NodeOperation>> for NodeOperations {
impl NodeOperations {
pub fn new(operations: Vec<NodeOperation>) -> Self {
Self {
operations: operations.into_iter().map(Rc::new).collect(),
operations: operations.into_iter().map(Arc::new).collect(),
}
}

View File

@ -1,4 +1,4 @@
use crate::core::{NodeBodyChangeset, Path};
use crate::core::{Changeset, Path};
use crate::text_delta::TextOperations;
use serde::de::{self, MapAccess, Visitor};
use serde::ser::SerializeMap;
@ -7,8 +7,7 @@ use std::convert::TryInto;
use std::fmt;
use std::marker::PhantomData;
#[allow(dead_code)]
pub fn serialize_edit_body<S>(path: &Path, changeset: &NodeBodyChangeset, serializer: S) -> Result<S::Ok, S::Error>
pub fn serialize_changeset<S>(path: &Path, changeset: &Changeset, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
@ -17,28 +16,34 @@ where
map.serialize_value(path)?;
match changeset {
NodeBodyChangeset::Delta { delta, inverted } => {
Changeset::Delta { delta, inverted } => {
map.serialize_key("delta")?;
map.serialize_value(delta)?;
map.serialize_key("inverted")?;
map.serialize_value(inverted)?;
map.end()
}
Changeset::Attributes { new, old } => {
map.serialize_key("new")?;
map.serialize_value(new)?;
map.serialize_key("old")?;
map.serialize_value(old)?;
map.end()
}
}
}
#[allow(dead_code)]
pub fn deserialize_edit_body<'de, D>(deserializer: D) -> Result<(Path, NodeBodyChangeset), D::Error>
pub fn deserialize_changeset<'de, D>(deserializer: D) -> Result<(Path, Changeset), D::Error>
where
D: Deserializer<'de>,
{
struct NodeBodyChangesetVisitor();
struct ChangesetVisitor();
impl<'de> Visitor<'de> for NodeBodyChangesetVisitor {
type Value = (Path, NodeBodyChangeset);
impl<'de> Visitor<'de> for ChangesetVisitor {
type Value = (Path, Changeset);
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Expect Path and NodeBodyChangeset")
formatter.write_str("Expect Path and Changeset")
}
#[inline]
@ -47,7 +52,7 @@ where
V: MapAccess<'de>,
{
let mut path: Option<Path> = None;
let mut delta_changeset = DeltaBodyChangeset::<V::Error>::new();
let mut delta_changeset = DeltaChangeset::<V::Error>::new();
while let Some(key) = map.next_key()? {
match key {
"delta" => {
@ -66,7 +71,6 @@ where
if path.is_some() {
return Err(de::Error::duplicate_field("path"));
}
path = Some(map.next_value::<Path>()?)
}
other => {
@ -83,17 +87,16 @@ where
Ok((path.unwrap(), changeset))
}
}
deserializer.deserialize_any(NodeBodyChangesetVisitor())
deserializer.deserialize_any(ChangesetVisitor())
}
#[allow(dead_code)]
struct DeltaBodyChangeset<E> {
struct DeltaChangeset<E> {
delta: Option<TextOperations>,
inverted: Option<TextOperations>,
error: PhantomData<E>,
}
impl<E> DeltaBodyChangeset<E> {
impl<E> DeltaChangeset<E> {
fn new() -> Self {
Self {
delta: None,
@ -103,13 +106,13 @@ impl<E> DeltaBodyChangeset<E> {
}
}
impl<E> std::convert::TryInto<NodeBodyChangeset> for DeltaBodyChangeset<E>
impl<E> std::convert::TryInto<Changeset> for DeltaChangeset<E>
where
E: de::Error,
{
type Error = E;
fn try_into(self) -> Result<NodeBodyChangeset, Self::Error> {
fn try_into(self) -> Result<Changeset, Self::Error> {
if self.delta.is_none() {
return Err(de::Error::missing_field("delta"));
}
@ -117,7 +120,7 @@ where
if self.inverted.is_none() {
return Err(de::Error::missing_field("inverted"));
}
let changeset = NodeBodyChangeset::Delta {
let changeset = Changeset::Delta {
delta: self.delta.unwrap(),
inverted: self.inverted.unwrap(),
};

View File

@ -26,6 +26,19 @@ use serde::{Deserialize, Serialize};
#[derive(Clone, Serialize, Deserialize, Eq, PartialEq, Debug)]
pub struct Path(pub Vec<usize>);
impl Path {
pub fn is_valid(&self) -> bool {
if self.is_empty() {
return false;
}
return true;
}
pub fn is_root(&self) -> bool {
return self.0.len() == 1 && self.0[0] == 0;
}
}
impl std::ops::Deref for Path {
type Target = Vec<usize>;

View File

@ -1,14 +1,21 @@
use super::{Changeset, NodeOperations};
use crate::core::attributes::AttributeHashMap;
use crate::core::{NodeData, NodeOperation, NodeTree, Path};
use crate::core::{Interval, NodeData, NodeOperation, NodeTree, Path};
use crate::errors::OTError;
use indextree::NodeId;
use std::rc::Rc;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use super::{NodeBodyChangeset, NodeOperations};
#[derive(Debug, Clone, Default)]
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct Transaction {
operations: NodeOperations,
#[serde(flatten)]
pub operations: NodeOperations,
#[serde(default)]
#[serde(flatten)]
#[serde(skip_serializing_if = "Extension::is_empty")]
pub extension: Extension,
}
impl Transaction {
@ -19,10 +26,25 @@ impl Transaction {
pub fn from_operations<T: Into<NodeOperations>>(operations: T) -> Self {
Self {
operations: operations.into(),
extension: Extension::Empty,
}
}
pub fn into_operations(self) -> Vec<Rc<NodeOperation>> {
pub fn from_bytes(bytes: &[u8]) -> Result<Self, OTError> {
let transaction = serde_json::from_slice(bytes).map_err(|err| OTError::serde().context(err))?;
Ok(transaction)
}
pub fn to_bytes(&self) -> Result<Vec<u8>, OTError> {
let bytes = serde_json::to_vec(&self).map_err(|err| OTError::serde().context(err))?;
Ok(bytes)
}
pub fn to_json(&self) -> Result<String, OTError> {
serde_json::to_string(&self).map_err(|err| OTError::serde().context(err))
}
pub fn into_operations(self) -> Vec<Arc<NodeOperation>> {
self.operations.into_inner()
}
@ -32,8 +54,9 @@ impl Transaction {
/// the operations of the transaction will be transformed into the conflict operations.
pub fn transform(&self, other: &Transaction) -> Result<Transaction, OTError> {
let mut new_transaction = other.clone();
new_transaction.extension = self.extension.clone();
for other_operation in new_transaction.iter_mut() {
let other_operation = Rc::make_mut(other_operation);
let other_operation = Arc::make_mut(other_operation);
for operation in self.operations.iter() {
operation.transform(other_operation);
}
@ -41,17 +64,19 @@ impl Transaction {
Ok(new_transaction)
}
pub fn compose(&mut self, other: &Transaction) -> Result<(), OTError> {
pub fn compose(&mut self, other: Transaction) -> Result<(), OTError> {
// For the moment, just append `other` operations to the end of `self`.
for operation in other.operations.iter() {
self.operations.push(operation.clone());
let Transaction { operations, extension } = other;
for operation in operations.into_inner().into_iter() {
self.operations.push(operation);
}
self.extension = extension;
Ok(())
}
}
impl std::ops::Deref for Transaction {
type Target = Vec<Rc<NodeOperation>>;
type Target = Vec<Arc<NodeOperation>>;
fn deref(&self) -> &Self::Target {
&self.operations
@ -64,6 +89,27 @@ impl std::ops::DerefMut for Transaction {
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Extension {
Empty,
TextSelection {
before_selection: Interval,
after_selection: Interval,
},
}
impl std::default::Default for Extension {
fn default() -> Self {
Extension::Empty
}
}
impl Extension {
fn is_empty(&self) -> bool {
matches!(self, Extension::Empty)
}
}
pub struct TransactionBuilder<'a> {
node_tree: &'a NodeTree,
operations: NodeOperations,
@ -87,17 +133,15 @@ impl<'a> TransactionBuilder<'a> {
/// # Examples
///
/// ```
/// // -- 0 (root)
/// // 0 -- text_1
/// // 1 -- text_2
/// // 0 -- text_1
/// use lib_ot::core::{NodeTree, NodeData, TransactionBuilder};
/// let mut node_tree = NodeTree::new("root");
/// let mut node_tree = NodeTree::default();
/// let transaction = TransactionBuilder::new(&node_tree)
/// .insert_nodes_at_path(0,vec![ NodeData::new("text_1"), NodeData::new("text_2")])
/// .insert_nodes_at_path(0,vec![ NodeData::new("text_1")])
/// .finalize();
/// node_tree.apply_transaction(transaction).unwrap();
///
/// node_tree.node_id_at_path(vec![0, 0]);
/// node_tree.node_id_at_path(vec![0]).unwrap();
/// ```
///
pub fn insert_nodes_at_path<T: Into<Path>>(self, path: T, nodes: Vec<NodeData>) -> Self {
@ -121,7 +165,7 @@ impl<'a> TransactionBuilder<'a> {
/// // -- 0
/// // |-- text
/// use lib_ot::core::{NodeTree, NodeData, TransactionBuilder};
/// let mut node_tree = NodeTree::new("root");
/// let mut node_tree = NodeTree::default();
/// let transaction = TransactionBuilder::new(&node_tree)
/// .insert_node_at_path(0, NodeData::new("text"))
/// .finalize();
@ -143,10 +187,12 @@ impl<'a> TransactionBuilder<'a> {
}
}
self.operations.add_op(NodeOperation::UpdateAttributes {
self.operations.add_op(NodeOperation::Update {
path: path.clone(),
new: attributes,
old: old_attributes,
changeset: Changeset::Attributes {
new: attributes,
old: old_attributes,
},
});
}
None => tracing::warn!("Update attributes at path: {:?} failed. Node is not exist", path),
@ -154,10 +200,10 @@ impl<'a> TransactionBuilder<'a> {
self
}
pub fn update_body_at_path(mut self, path: &Path, changeset: NodeBodyChangeset) -> Self {
pub fn update_body_at_path(mut self, path: &Path, changeset: Changeset) -> Self {
match self.node_tree.node_id_at_path(path) {
Some(_) => {
self.operations.add_op(NodeOperation::UpdateBody {
self.operations.add_op(NodeOperation::Update {
path: path.clone(),
changeset,
});
@ -172,11 +218,17 @@ impl<'a> TransactionBuilder<'a> {
}
pub fn delete_nodes_at_path(mut self, path: &Path, length: usize) -> Self {
let mut node = self.node_tree.node_id_at_path(path).unwrap();
let node_id = self.node_tree.node_id_at_path(path);
if node_id.is_none() {
tracing::warn!("Path: {:?} doesn't contains any nodes", path);
return self;
}
let mut node_id = node_id.unwrap();
let mut deleted_nodes = vec![];
for _ in 0..length {
deleted_nodes.push(self.get_deleted_nodes(node));
node = self.node_tree.following_siblings(node).next().unwrap();
deleted_nodes.push(self.get_deleted_node_data(node_id));
node_id = self.node_tree.following_siblings(node_id).next().unwrap();
}
self.operations.add_op(NodeOperation::Delete {
@ -186,13 +238,16 @@ impl<'a> TransactionBuilder<'a> {
self
}
fn get_deleted_nodes(&self, node_id: NodeId) -> NodeData {
fn get_deleted_node_data(&self, node_id: NodeId) -> NodeData {
let node_data = self.node_tree.get_node(node_id).unwrap();
let mut children = vec![];
self.node_tree.children_from_node(node_id).for_each(|child_id| {
children.push(self.get_deleted_nodes(child_id));
});
self.node_tree
.get_children_ids(node_id)
.into_iter()
.for_each(|child_id| {
children.push(self.get_deleted_node_data(child_id));
});
NodeData {
node_type: node_data.node_type.clone(),

View File

@ -0,0 +1,29 @@
use crate::core::Extension;
use serde::ser::SerializeMap;
use serde::Serializer;
#[allow(dead_code)]
pub fn serialize_extension<S>(extension: &Extension, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match extension {
Extension::Empty => {
let map = serializer.serialize_map(None)?;
map.end()
}
Extension::TextSelection {
before_selection,
after_selection,
} => {
let mut map = serializer.serialize_map(Some(2))?;
map.serialize_key("before_selection")?;
map.serialize_value(before_selection)?;
map.serialize_key("after_selection")?;
map.serialize_value(after_selection)?;
map.end()
}
}
}

View File

@ -1,37 +1,45 @@
use crate::core::attributes::AttributeHashMap;
use crate::core::{Node, NodeBodyChangeset, NodeData, NodeOperation, OperationTransform, Path, Transaction};
use crate::errors::{ErrorBuilder, OTError, OTErrorCode};
use indextree::{Arena, Children, FollowingSiblings, NodeId};
use std::rc::Rc;
use super::NodeOperations;
use crate::core::{Changeset, Node, NodeData, NodeOperation, Path, Transaction};
use crate::errors::{ErrorBuilder, OTError, OTErrorCode};
use indextree::{Arena, FollowingSiblings, NodeId};
use std::sync::Arc;
///
#[derive(Default, Debug)]
pub struct NodeTreeContext {}
#[derive(Debug)]
pub struct NodeTree {
arena: Arena<Node>,
root: NodeId,
pub context: NodeTreeContext,
}
impl Default for NodeTree {
fn default() -> Self {
Self::new("root")
Self::new(NodeTreeContext::default())
}
}
impl NodeTree {
pub fn new(root_name: &str) -> NodeTree {
pub fn new(context: NodeTreeContext) -> NodeTree {
let mut arena = Arena::new();
let root = arena.new_node(Node::new(root_name));
NodeTree { arena, root }
let root = arena.new_node(Node::new("root"));
NodeTree { arena, root, context }
}
pub fn from_bytes(root_name: &str, bytes: Vec<u8>) -> Result<Self, OTError> {
pub fn from_node_data(node_data: NodeData, context: NodeTreeContext) -> Result<Self, OTError> {
let mut tree = Self::new(context);
let _ = tree.insert_nodes(&0_usize.into(), vec![node_data])?;
Ok(tree)
}
pub fn from_bytes(bytes: Vec<u8>, context: NodeTreeContext) -> Result<Self, OTError> {
let operations = NodeOperations::from_bytes(bytes)?;
Self::from_operations(root_name, operations)
Self::from_operations(operations, context)
}
pub fn from_operations(root_name: &str, operations: NodeOperations) -> Result<Self, OTError> {
let mut node_tree = NodeTree::new(root_name);
pub fn from_operations(operations: NodeOperations, context: NodeTreeContext) -> Result<Self, OTError> {
let mut node_tree = NodeTree::new(context);
for operation in operations.into_inner().into_iter() {
let _ = node_tree.apply_op(operation)?;
}
@ -39,13 +47,75 @@ impl NodeTree {
}
pub fn get_node(&self, node_id: NodeId) -> Option<&Node> {
if node_id.is_removed(&self.arena) {
return None;
}
Some(self.arena.get(node_id)?.get())
}
pub fn get_node_at_path(&self, path: &Path) -> Option<&Node> {
{
let node_id = self.node_id_at_path(path)?;
self.get_node(node_id)
let node_id = self.node_id_at_path(path)?;
self.get_node(node_id)
}
pub fn get_node_data_at_path(&self, path: &Path) -> Option<NodeData> {
let node_id = self.node_id_at_path(path)?;
let node_data = self.get_node_data(node_id)?;
Some(node_data)
}
pub fn get_node_data_at_root(&self) -> Option<NodeData> {
self.get_node_data(self.root)
}
pub fn get_node_data(&self, node_id: NodeId) -> Option<NodeData> {
let Node {
node_type,
body,
attributes,
} = self.get_node(node_id)?.clone();
let mut node_data = NodeData::new(node_type);
for (key, value) in attributes.into_inner() {
node_data.attributes.insert(key, value);
}
node_data.body = body;
let children = self.get_children_ids(node_id);
for child in children.into_iter() {
if let Some(child_node_data) = self.get_node_data(child) {
node_data.children.push(child_node_data);
}
}
Some(node_data)
}
pub fn root_node_id(&self) -> NodeId {
self.root
}
pub fn get_children(&self, node_id: NodeId) -> Vec<&Node> {
node_id
.children(&self.arena)
.flat_map(|node_id| self.get_node(node_id))
.collect()
}
/// Returns a iterator used to iterate over the node ids whose parent node id is node_id
///
/// * `node_id`: the children's parent node id
///
pub fn get_children_ids(&self, node_id: NodeId) -> Vec<NodeId> {
node_id.children(&self.arena).collect()
}
/// Serialize the node to JSON with node_id
pub fn serialize_node(&self, node_id: NodeId, pretty_json: bool) -> Result<String, OTError> {
let node_data = self
.get_node_data(node_id)
.ok_or_else(|| OTError::internal().context("Node doesn't exist exist"))?;
if pretty_json {
serde_json::to_string_pretty(&node_data).map_err(|err| OTError::serde().context(err))
} else {
serde_json::to_string(&node_data).map_err(|err| OTError::serde().context(err))
}
}
@ -53,28 +123,32 @@ impl NodeTree {
/// # Examples
///
/// ```
/// use std::rc::Rc;
/// use std::sync::Arc;
/// use lib_ot::core::{NodeOperation, NodeTree, NodeData, Path};
/// let nodes = vec![NodeData::new("text".to_string())];
/// let root_path: Path = vec![0].into();
/// let op = NodeOperation::Insert {path: root_path.clone(),nodes };
///
/// let mut node_tree = NodeTree::new("root");
/// node_tree.apply_op(Rc::new(op)).unwrap();
/// let mut node_tree = NodeTree::default();
/// node_tree.apply_op(Arc::new(op)).unwrap();
/// let node_id = node_tree.node_id_at_path(&root_path).unwrap();
/// let node_path = node_tree.path_from_node_id(node_id);
/// debug_assert_eq!(node_path, root_path);
/// ```
pub fn node_id_at_path<T: Into<Path>>(&self, path: T) -> Option<NodeId> {
let path = path.into();
if path.is_empty() {
return Some(self.root);
if !path.is_valid() {
return None;
}
let mut iterate_node = self.root;
for id in path.iter() {
iterate_node = self.child_from_node_at_index(iterate_node, *id)?;
}
if iterate_node.is_removed(&self.arena) {
return None;
}
Some(iterate_node)
}
@ -105,7 +179,7 @@ impl NodeTree {
counter
}
/// Returns the note_id at the position of the tree with id note_id
/// Returns the note_id at the index of the tree which its id is note_id
/// # Arguments
///
/// * `node_id`: the node id of the child's parent
@ -116,14 +190,14 @@ impl NodeTree {
/// # Examples
///
/// ```
/// use std::rc::Rc;
/// use std::sync::Arc;
/// use lib_ot::core::{NodeOperation, NodeTree, NodeData, Path};
/// let node_1 = NodeData::new("text".to_string());
/// let inserted_path: Path = vec![0].into();
///
/// let mut node_tree = NodeTree::new("root");
/// let mut node_tree = NodeTree::default();
/// let op = NodeOperation::Insert {path: inserted_path.clone(),nodes: vec![node_1.clone()] };
/// node_tree.apply_op(Rc::new(op)).unwrap();
/// node_tree.apply_op(Arc::new(op)).unwrap();
///
/// let node_2 = node_tree.get_node_at_path(&inserted_path).unwrap();
/// assert_eq!(node_2.node_type, node_1.node_type);
@ -139,14 +213,6 @@ impl NodeTree {
None
}
/// Returns all children whose parent node id is node_id
///
/// * `node_id`: the children's parent node id
///
pub fn children_from_node(&self, node_id: NodeId) -> Children<'_, Node> {
node_id.children(&self.arena)
}
///
/// # Arguments
///
@ -173,36 +239,59 @@ impl NodeTree {
Ok(())
}
pub fn apply_op(&mut self, op: Rc<NodeOperation>) -> Result<(), OTError> {
let op = match Rc::try_unwrap(op) {
pub fn apply_op(&mut self, op: Arc<NodeOperation>) -> Result<(), OTError> {
let op = match Arc::try_unwrap(op) {
Ok(op) => op,
Err(op) => op.as_ref().clone(),
};
match op {
NodeOperation::Insert { path, nodes } => self.insert_nodes(&path, nodes),
NodeOperation::UpdateAttributes { path, new, .. } => self.update_attributes(&path, new),
NodeOperation::UpdateBody { path, changeset } => self.update_body(&path, changeset),
NodeOperation::Delete { path, nodes } => self.delete_node(&path, nodes),
NodeOperation::Update { path, changeset } => self.update(&path, changeset),
NodeOperation::Delete { path, nodes: _ } => self.delete_node(&path),
}
}
/// Inserts nodes at given path
/// root
/// 0 - A
/// 0 - A1
/// 1 - B
/// 0 - B1
/// 1 - B2
///
/// The path of each node will be:
/// A: [0]
/// A1: [0,0]
/// B: [1]
/// B1: [1,0]
/// B2: [1,1]
///
/// When inserting multiple nodes into the same path, each of them will be appended to the root
/// node. For example. The path is [0] and the nodes are [A, B, C]. After inserting the nodes,
/// the tree will be:
/// root
/// 0: A
/// 1: B
/// 2: C
///
/// returns error if the path is empty
///
fn insert_nodes(&mut self, path: &Path, nodes: Vec<NodeData>) -> Result<(), OTError> {
debug_assert!(!path.is_empty());
if path.is_empty() {
return Err(OTErrorCode::PathIsEmpty.into());
if !path.is_valid() {
return Err(OTErrorCode::InvalidPath.into());
}
let (parent_path, last_path) = path.split_at(path.0.len() - 1);
let last_index = *last_path.first().unwrap();
let parent_node = self
.node_id_at_path(parent_path)
.ok_or_else(|| ErrorBuilder::new(OTErrorCode::PathNotFound).build())?;
if parent_path.is_empty() {
self.insert_nodes_at_index(self.root, last_index, nodes)
} else {
let parent_node = self
.node_id_at_path(parent_path)
.ok_or_else(|| ErrorBuilder::new(OTErrorCode::PathNotFound).build())?;
self.insert_nodes_at_index(parent_node, last_index, nodes)
self.insert_nodes_at_index(parent_node, last_index, nodes)
}
}
/// Inserts nodes before the node with node_id
@ -252,34 +341,24 @@ impl NodeTree {
}
}
fn update_attributes(&mut self, path: &Path, attributes: AttributeHashMap) -> Result<(), OTError> {
self.mut_node_at_path(path, |node| {
let new_attributes = AttributeHashMap::compose(&node.attributes, &attributes)?;
node.attributes = new_attributes;
Ok(())
})
}
fn delete_node(&mut self, path: &Path, nodes: Vec<NodeData>) -> Result<(), OTError> {
let mut update_node = self
.node_id_at_path(path)
.ok_or_else(|| ErrorBuilder::new(OTErrorCode::PathNotFound).build())?;
for _ in 0..nodes.len() {
let next = update_node.following_siblings(&self.arena).next();
update_node.remove_subtree(&mut self.arena);
if let Some(next_id) = next {
update_node = next_id;
} else {
break;
/// Removes a node and its descendants from the tree
fn delete_node(&mut self, path: &Path) -> Result<(), OTError> {
if !path.is_valid() {
return Err(OTErrorCode::InvalidPath.into());
}
match self.node_id_at_path(path) {
None => tracing::warn!("Can't find any node at path: {:?}", path),
Some(node) => {
node.remove_subtree(&mut self.arena);
}
}
Ok(())
}
fn update_body(&mut self, path: &Path, changeset: NodeBodyChangeset) -> Result<(), OTError> {
fn update(&mut self, path: &Path, changeset: Changeset) -> Result<(), OTError> {
self.mut_node_at_path(path, |node| {
node.apply_body_changeset(changeset);
let _ = node.apply_changeset(changeset)?;
Ok(())
})
}
@ -288,6 +367,9 @@ impl NodeTree {
where
F: FnOnce(&mut Node) -> Result<(), OTError>,
{
if !path.is_valid() {
return Err(OTErrorCode::InvalidPath.into());
}
let node_id = self
.node_id_at_path(path)
.ok_or_else(|| ErrorBuilder::new(OTErrorCode::PathNotFound).build())?;

View File

@ -0,0 +1,63 @@
use crate::core::{NodeData, NodeTree, NodeTreeContext};
use serde::de::{MapAccess, Visitor};
use serde::ser::SerializeSeq;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
use std::marker::PhantomData;
impl Serialize for NodeTree {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let root_node_id = self.root_node_id();
let mut children = self.get_children_ids(root_node_id);
if children.is_empty() {
return serializer.serialize_str("");
}
if children.len() == 1 {
let node_id = children.pop().unwrap();
match self.get_node_data(node_id) {
None => serializer.serialize_str(""),
Some(node_data) => node_data.serialize(serializer),
}
} else {
let mut seq = serializer.serialize_seq(Some(children.len()))?;
for child in children {
if let Some(child_node_data) = self.get_node_data(child) {
let _ = seq.serialize_element(&child_node_data)?;
}
}
seq.end()
}
}
}
impl<'de> Deserialize<'de> for NodeTree {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct NodeTreeVisitor(PhantomData<NodeData>);
impl<'de> Visitor<'de> for NodeTreeVisitor {
type Value = NodeData;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Expected node data tree")
}
fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
where
V: MapAccess<'de>,
{
// Forward the deserialization to NodeData
Deserialize::deserialize(de::value::MapAccessDeserializer::new(map))
}
}
let node_data: NodeData = deserializer.deserialize_any(NodeTreeVisitor(PhantomData))?;
Ok(NodeTree::from_node_data(node_data, NodeTreeContext::default()).unwrap())
}
}

View File

@ -74,6 +74,7 @@ pub enum OTErrorCode {
Internal,
PathNotFound,
PathIsEmpty,
InvalidPath,
UnexpectedEmpty,
}

View File

@ -1,164 +0,0 @@
use super::script::{NodeScript::*, *};
use lib_ot::core::AttributeBuilder;
use lib_ot::{
core::{NodeData, Path},
text_delta::TextOperationBuilder,
};
#[test]
fn editor_deserialize_node_test() {
let mut test = NodeTest::new();
let node: NodeData = serde_json::from_str(EXAMPLE_JSON).unwrap();
let path: Path = 0.into();
let expected_delta = TextOperationBuilder::new()
.insert("👋 ")
.insert_with_attributes(
"Welcome to ",
AttributeBuilder::new().insert("href", "appflowy.io").build(),
)
.insert_with_attributes(
"AppFlowy Editor",
AttributeBuilder::new().insert("italic", true).build(),
)
.build();
test.run_scripts(vec![
InsertNode {
path,
node_data: node.clone(),
rev_id: 1,
},
AssertNumberOfNodesAtPath { path: None, len: 1 },
AssertNumberOfNodesAtPath {
path: Some(0.into()),
len: 14,
},
AssertNumberOfNodesAtPath {
path: Some(0.into()),
len: 14,
},
AssertNodeDelta {
path: vec![0, 1].into(),
expected: expected_delta,
},
AssertNodeData {
path: vec![0, 0].into(),
expected: Some(node.children[0].clone()),
},
AssertNodeData {
path: vec![0, 3].into(),
expected: Some(node.children[3].clone()),
},
]);
}
#[allow(dead_code)]
const EXAMPLE_JSON: &str = r#"
{
"type": "editor",
"children": [
{
"type": "image",
"attributes": {
"image_src": "https://s1.ax1x.com/2022/08/26/v2sSbR.jpg",
"align": "center"
}
},
{
"type": "text",
"attributes": {
"subtype": "heading",
"heading": "h1"
},
"body": {
"delta": [
{
"insert": "👋 "
},
{
"insert": "Welcome to ",
"attributes": {
"href": "appflowy.io"
}
},
{
"insert": "AppFlowy Editor",
"attributes": {
"italic": true
}
}
]
}
},
{ "type": "text", "delta": [] },
{
"type": "text",
"body": {
"delta": [
{ "insert": "AppFlowy Editor is a " },
{ "insert": "highly customizable", "attributes": { "bold": true } },
{ "insert": " " },
{ "insert": "rich-text editor", "attributes": { "italic": true } },
{ "insert": " for " },
{ "insert": "Flutter", "attributes": { "underline": true } }
]
}
},
{
"type": "text",
"attributes": { "checkbox": true, "subtype": "checkbox" },
"body": {
"delta": [{ "insert": "Customizable" }]
}
},
{
"type": "text",
"attributes": { "checkbox": true, "subtype": "checkbox" },
"delta": [{ "insert": "Test-covered" }]
},
{
"type": "text",
"attributes": { "checkbox": false, "subtype": "checkbox" },
"delta": [{ "insert": "more to come!" }]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"attributes": { "subtype": "quote" },
"delta": [{ "insert": "Here is an exmaple you can give it a try" }]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"delta": [
{ "insert": "You can also use " },
{
"insert": "AppFlowy Editor",
"attributes": {
"italic": true,
"bold": true,
"backgroundColor": "0x6000BCF0"
}
},
{ "insert": " as a component to build your own app." }
]
},
{ "type": "text", "delta": [] },
{
"type": "text",
"attributes": { "subtype": "bulleted-list" },
"delta": [{ "insert": "Use / to insert blocks" }]
},
{
"type": "text",
"attributes": { "subtype": "bulleted-list" },
"delta": [
{
"insert": "Select text to trigger to the toolbar to format your notes."
}
]
}
]
}
"#;

View File

@ -1,4 +1,4 @@
mod editor_test;
mod operation_test;
mod script;
mod serde_test;
mod tree_test;

View File

@ -1,75 +1,7 @@
use crate::node::script::NodeScript::*;
use crate::node::script::NodeTest;
use lib_ot::core::{AttributeBuilder, Node};
use lib_ot::{
core::{NodeBodyChangeset, NodeData, NodeDataBuilder, NodeOperation, Path},
text_delta::TextOperationBuilder,
};
#[test]
fn operation_insert_node_serde_test() {
let insert = NodeOperation::Insert {
path: Path(vec![0, 1]),
nodes: vec![NodeData::new("text".to_owned())],
};
let result = serde_json::to_string(&insert).unwrap();
assert_eq!(result, r#"{"op":"insert","path":[0,1],"nodes":[{"type":"text"}]}"#);
}
#[test]
fn operation_insert_node_with_children_serde_test() {
let node = NodeDataBuilder::new("text")
.add_node(NodeData::new("sub_text".to_owned()))
.build();
let insert = NodeOperation::Insert {
path: Path(vec![0, 1]),
nodes: vec![node],
};
assert_eq!(
serde_json::to_string(&insert).unwrap(),
r#"{"op":"insert","path":[0,1],"nodes":[{"type":"text","children":[{"type":"sub_text"}]}]}"#
);
}
#[test]
fn operation_update_node_attributes_serde_test() {
let operation = NodeOperation::UpdateAttributes {
path: Path(vec![0, 1]),
new: AttributeBuilder::new().insert("bold", true).build(),
old: AttributeBuilder::new().insert("bold", false).build(),
};
let result = serde_json::to_string(&operation).unwrap();
assert_eq!(
result,
r#"{"op":"update-attribute","path":[0,1],"new":{"bold":true},"old":{"bold":null}}"#
);
}
#[test]
fn operation_update_node_body_serialize_test() {
let delta = TextOperationBuilder::new().insert("AppFlowy...").build();
let inverted = delta.invert_str("");
let changeset = NodeBodyChangeset::Delta { delta, inverted };
let insert = NodeOperation::UpdateBody {
path: Path(vec![0, 1]),
changeset,
};
let result = serde_json::to_string(&insert).unwrap();
assert_eq!(
result,
r#"{"op":"update-body","path":[0,1],"changeset":{"delta":{"delta":[{"insert":"AppFlowy..."}],"inverted":[{"delete":11}]}}}"#
);
//
}
#[test]
fn operation_update_node_body_deserialize_test() {
let json_1 = r#"{"op":"update-body","path":[0,1],"changeset":{"delta":{"delta":[{"insert":"AppFlowy..."}],"inverted":[{"delete":11}]}}}"#;
let operation: NodeOperation = serde_json::from_str(json_1).unwrap();
let json_2 = serde_json::to_string(&operation).unwrap();
assert_eq!(json_1, json_2);
}
use lib_ot::core::{NodeDataBuilder, NodeOperation, Path};
#[test]
fn operation_insert_op_transform_test() {
@ -94,12 +26,12 @@ fn operation_insert_op_transform_test() {
}
#[test]
fn operation_insert_transform_one_level_path_test() {
fn operation_insert_one_level_path_test() {
let mut test = NodeTest::new();
let node_data_1 = NodeDataBuilder::new("text_1").build();
let node_data_2 = NodeDataBuilder::new("text_2").build();
let node_data_3 = NodeDataBuilder::new("text_3").build();
let node_3: Node = node_data_3.clone().into();
let node_3 = node_data_3.clone();
// 0: text_1
// 1: text_2
//
@ -140,16 +72,16 @@ fn operation_insert_transform_one_level_path_test() {
}
#[test]
fn operation_insert_transform_multiple_level_path_test() {
fn operation_insert_with_multiple_level_path_test() {
let mut test = NodeTest::new();
let node_data_1 = NodeDataBuilder::new("text_1")
.add_node(NodeDataBuilder::new("text_1_1").build())
.add_node(NodeDataBuilder::new("text_1_2").build())
.add_node_data(NodeDataBuilder::new("text_1_1").build())
.add_node_data(NodeDataBuilder::new("text_1_2").build())
.build();
let node_data_2 = NodeDataBuilder::new("text_2")
.add_node(NodeDataBuilder::new("text_2_1").build())
.add_node(NodeDataBuilder::new("text_2_2").build())
.add_node_data(NodeDataBuilder::new("text_2_1").build())
.add_node_data(NodeDataBuilder::new("text_2_2").build())
.build();
let node_data_3 = NodeDataBuilder::new("text_3").build();
@ -178,12 +110,12 @@ fn operation_insert_transform_multiple_level_path_test() {
}
#[test]
fn operation_delete_transform_path_test() {
fn operation_delete_test() {
let mut test = NodeTest::new();
let node_data_1 = NodeDataBuilder::new("text_1").build();
let node_data_2 = NodeDataBuilder::new("text_2").build();
let node_data_3 = NodeDataBuilder::new("text_3").build();
let node_3: Node = node_data_3.clone().into();
let node_3 = node_data_3.clone();
let scripts = vec![
InsertNode {
@ -221,7 +153,10 @@ fn operation_delete_transform_path_test() {
// After perform the delete action, the tree will be:
// 0: text_1
// 1: text_3
AssertNumberOfNodesAtPath { path: None, len: 2 },
AssertNumberOfChildrenAtPath {
path: None,
expected: 2,
},
AssertNode {
path: 1.into(),
expected: Some(node_3),

View File

@ -1,7 +1,8 @@
use lib_ot::core::{Node, Transaction};
#![allow(clippy::all)]
use lib_ot::core::{NodeTreeContext, Transaction};
use lib_ot::{
core::attributes::AttributeHashMap,
core::{NodeBody, NodeBodyChangeset, NodeData, NodeTree, Path, TransactionBuilder},
core::{Body, Changeset, NodeData, NodeTree, Path, TransactionBuilder},
text_delta::TextOperations,
};
use std::collections::HashMap;
@ -12,34 +13,47 @@ pub enum NodeScript {
node_data: NodeData,
rev_id: usize,
},
InsertNodes {
path: Path,
node_data_list: Vec<NodeData>,
rev_id: usize,
},
UpdateAttributes {
path: Path,
attributes: AttributeHashMap,
},
UpdateBody {
path: Path,
changeset: NodeBodyChangeset,
changeset: Changeset,
},
DeleteNode {
path: Path,
rev_id: usize,
},
AssertNumberOfNodesAtPath {
AssertNumberOfChildrenAtPath {
path: Option<Path>,
len: usize,
expected: usize,
},
AssertNodeData {
AssertNodesAtRoot {
expected: Vec<NodeData>,
},
#[allow(dead_code)]
AssertNodesAtPath {
path: Path,
expected: Option<NodeData>,
expected: Vec<NodeData>,
},
AssertNode {
path: Path,
expected: Option<Node>,
expected: Option<NodeData>,
},
AssertNodeDelta {
path: Path,
expected: TextOperations,
},
#[allow(dead_code)]
AssertTreeJSON {
expected: String,
},
}
pub struct NodeTest {
@ -53,7 +67,7 @@ impl NodeTest {
Self {
rev_id: 0,
rev_operations: HashMap::new(),
node_tree: NodeTree::new("root"),
node_tree: NodeTree::new(NodeTreeContext::default()),
}
}
@ -76,6 +90,17 @@ impl NodeTest {
self.transform_transaction_if_need(&mut transaction, rev_id);
self.apply_transaction(transaction);
}
NodeScript::InsertNodes {
path,
node_data_list,
rev_id,
} => {
let mut transaction = TransactionBuilder::new(&self.node_tree)
.insert_nodes_at_path(path, node_data_list)
.finalize();
self.transform_transaction_if_need(&mut transaction, rev_id);
self.apply_transaction(transaction);
}
NodeScript::UpdateAttributes { path, attributes } => {
let transaction = TransactionBuilder::new(&self.node_tree)
.update_attributes_at_path(&path, attributes)
@ -96,48 +121,42 @@ impl NodeTest {
self.transform_transaction_if_need(&mut transaction, rev_id);
self.apply_transaction(transaction);
}
NodeScript::AssertNode { path, expected } => {
let node_id = self.node_tree.node_id_at_path(path);
if expected.is_none() && node_id.is_none() {
return;
}
let node = self.node_tree.get_node(node_id.unwrap()).cloned();
assert_eq!(node, expected);
let node = self.node_tree.get_node_data_at_path(&path);
assert_eq!(node, expected.map(|e| e.into()));
}
NodeScript::AssertNodeData { path, expected } => {
let node_id = self.node_tree.node_id_at_path(path);
match node_id {
None => assert!(node_id.is_none()),
Some(node_id) => {
let node = self.node_tree.get_node(node_id).cloned();
assert_eq!(node, expected.map(|e| e.into()));
}
}
}
NodeScript::AssertNumberOfNodesAtPath {
path,
len: expected_len,
} => match path {
NodeScript::AssertNumberOfChildrenAtPath { path, expected } => match path {
None => {
let len = self.node_tree.number_of_children(None);
assert_eq!(len, expected_len)
assert_eq!(len, expected)
}
Some(path) => {
let node_id = self.node_tree.node_id_at_path(path).unwrap();
let len = self.node_tree.number_of_children(Some(node_id));
assert_eq!(len, expected_len)
assert_eq!(len, expected)
}
},
NodeScript::AssertNodesAtRoot { expected } => {
let nodes = self.node_tree.get_node_data_at_root().unwrap().children;
assert_eq!(nodes, expected)
}
NodeScript::AssertNodesAtPath { path, expected } => {
let nodes = self.node_tree.get_node_data_at_path(&path).unwrap().children;
assert_eq!(nodes, expected)
}
NodeScript::AssertNodeDelta { path, expected } => {
let node = self.node_tree.get_node_at_path(&path).unwrap();
if let NodeBody::Delta(delta) = node.body.clone() {
if let Body::Delta(delta) = node.body.clone() {
debug_assert_eq!(delta, expected);
} else {
panic!("Node body type not match, expect Delta");
}
}
NodeScript::AssertTreeJSON { expected } => {
let json = serde_json::to_string(&self.node_tree).unwrap();
assert_eq!(json, expected)
}
}
}

View File

@ -0,0 +1,147 @@
use lib_ot::core::{
AttributeBuilder, Changeset, Extension, Interval, NodeData, NodeDataBuilder, NodeOperation, NodeTree, Path,
Transaction,
};
use lib_ot::text_delta::TextOperationBuilder;
#[test]
// Serializing a bare `Insert` operation must produce the compact tagged JSON
// form: an "op" discriminant plus the path and the node list.
fn operation_insert_node_serde_test() {
    let path = Path(vec![0, 1]);
    let nodes = vec![NodeData::new("text".to_owned())];
    let operation = NodeOperation::Insert { path, nodes };
    let json = serde_json::to_string(&operation).unwrap();
    assert_eq!(json, r#"{"op":"insert","path":[0,1],"nodes":[{"type":"text"}]}"#);
}
#[test]
// Children of an inserted node must be serialized recursively under the
// "children" key of their parent.
fn operation_insert_node_with_children_serde_test() {
    let node = NodeDataBuilder::new("text")
        .add_node_data(NodeData::new("sub_text".to_owned()))
        .build();
    let insert = NodeOperation::Insert {
        path: Path(vec![0, 1]),
        nodes: vec![node],
    };
    assert_eq!(
        serde_json::to_string(&insert).unwrap(),
        r#"{"op":"insert","path":[0,1],"nodes":[{"type":"text","children":[{"type":"sub_text"}]}]}"#
    );
}
#[test]
// An attribute update carries both the new and the old value so the operation
// can be inverted. NOTE(review): `old` is built with `bold: false` yet the
// expected JSON is {"bold":null} — presumably the serializer nulls out the
// previous value; confirm against AttributeHashMap's serde implementation.
fn operation_update_node_attributes_serde_test() {
    let operation = NodeOperation::Update {
        path: Path(vec![0, 1]),
        changeset: Changeset::Attributes {
            new: AttributeBuilder::new().insert("bold", true).build(),
            old: AttributeBuilder::new().insert("bold", false).build(),
        },
    };
    let result = serde_json::to_string(&operation).unwrap();
    assert_eq!(
        result,
        r#"{"op":"update","path":[0,1],"new":{"bold":true},"old":{"bold":null}}"#
    );
}
#[test]
// New wire format: a delta changeset is flattened into the operation JSON —
// "delta" and "inverted" are siblings of "op"/"path", with no "changeset"
// wrapper (contrast with the legacy "update-body" format).
fn operation_update_node_body_serialize_test() {
    let delta = TextOperationBuilder::new().insert("AppFlowy...").build();
    // Inverting an insert against the empty document yields a pure delete of
    // the inserted length ("AppFlowy..." is 11 characters).
    let inverted = delta.invert_str("");
    let changeset = Changeset::Delta { delta, inverted };
    let insert = NodeOperation::Update {
        path: Path(vec![0, 1]),
        changeset,
    };
    let result = serde_json::to_string(&insert).unwrap();
    assert_eq!(
        result,
        r#"{"op":"update","path":[0,1],"delta":[{"insert":"AppFlowy..."}],"inverted":[{"delete":11}]}"#
    );
}
#[test]
// Round-trip check for the flattened "update" format: deserializing and
// re-serializing must reproduce the input string byte-for-byte.
fn operation_update_node_body_deserialize_test() {
    let original = r#"{"op":"update","path":[0,1],"delta":[{"insert":"AppFlowy..."}],"inverted":[{"delete":11}]}"#;
    let operation: NodeOperation = serde_json::from_str(original).unwrap();
    let roundtripped = serde_json::to_string(&operation).unwrap();
    assert_eq!(original, roundtripped);
}
#[test]
// A transaction serializes its operation list plus its extension; the
// TextSelection extension appears at the top level, tagged by variant name.
fn transaction_serialize_test() {
    let insert = NodeOperation::Insert {
        path: Path(vec![0, 1]),
        nodes: vec![NodeData::new("text".to_owned())],
    };
    let mut transaction = Transaction::from_operations(vec![insert]);
    transaction.extension = Extension::TextSelection {
        before_selection: Interval::new(0, 1),
        after_selection: Interval::new(1, 2),
    };
    let json = serde_json::to_string(&transaction).unwrap();
    assert_eq!(
        json,
        r#"{"operations":[{"op":"insert","path":[0,1],"nodes":[{"type":"text"}]}],"TextSelection":{"before_selection":{"start":0,"end":1},"after_selection":{"start":1,"end":2}}}"#
    );
}
#[test]
// Deserializing a full transaction (operations + extension) recovers the
// operation list intact.
fn transaction_deserialize_test() {
    let json = r#"{"operations":[{"op":"insert","path":[0,1],"nodes":[{"type":"text"}]}],"TextSelection":{"before_selection":{"start":0,"end":1},"after_selection":{"start":1,"end":2}}}"#;
    let transaction: Transaction = serde_json::from_str(json).unwrap();
    assert_eq!(transaction.operations.len(), 1);
}
#[test]
// A whole NodeTree can be rebuilt from JSON; TREE_JSON's "editor" root has
// exactly one child counted here (None = count children of the root).
fn node_tree_deserialize_test() {
    let tree: NodeTree = serde_json::from_str(TREE_JSON).unwrap();
    assert_eq!(tree.number_of_children(None), 1);
}
#[test]
// The JSON round-trip through NodeTree must be lossless: pretty-printed
// serialization reproduces TREE_JSON exactly, key order included.
fn node_tree_serialize_test() {
    let tree: NodeTree = serde_json::from_str(TREE_JSON).unwrap();
    let json = serde_json::to_string_pretty(&tree).unwrap();
    assert_eq!(json, TREE_JSON);
}
// Reference document used by the tree (de)serialization tests above: an
// "editor" root with an image child and a styled text child whose body is a
// text delta. Indented with two spaces so it matches the output of
// serde_json::to_string_pretty byte-for-byte (see node_tree_serialize_test).
#[allow(dead_code)]
const TREE_JSON: &str = r#"{
  "type": "editor",
  "children": [
    {
      "type": "image",
      "attributes": {
        "image_src": "https://s1.ax1x.com/2022/08/26/v2sSbR.jpg"
      }
    },
    {
      "type": "text",
      "attributes": {
        "heading": "h1"
      },
      "body": {
        "delta": [
          {
            "insert": "👋 "
          },
          {
            "insert": "Welcome to ",
            "attributes": {
              "href": "appflowy.io"
            }
          },
          {
            "insert": "AppFlowy Editor",
            "attributes": {
              "italic": true
            }
          }
        ]
      }
    }
  ]
}"#;

View File

@ -1,25 +1,169 @@
use crate::node::script::NodeScript::*;
use crate::node::script::NodeTest;
use lib_ot::core::NodeBody;
use lib_ot::core::NodeBodyChangeset;
use lib_ot::core::Body;
use lib_ot::core::Changeset;
use lib_ot::core::OperationTransform;
use lib_ot::core::{NodeData, NodeDataBuilder, Path};
use lib_ot::text_delta::TextOperationBuilder;
use lib_ot::text_delta::{TextOperationBuilder, TextOperations};
#[test]
fn node_insert_test() {
let mut test = NodeTest::new();
let inserted_node = NodeData::new("text");
let path: Path = 0.into();
let node_data = NodeData::new("text");
let path: Path = vec![0].into();
let scripts = vec![
InsertNode {
path: path.clone(),
node_data: inserted_node.clone(),
node_data: node_data.clone(),
rev_id: 1,
},
AssertNodeData {
AssertNode {
path,
expected: Some(inserted_node),
expected: Some(node_data),
},
];
test.run_scripts(scripts);
}
#[test]
#[should_panic]
// Inserting at the empty path is invalid — every node needs a position under
// the root — so running the script is expected to panic.
fn node_insert_with_empty_path_test() {
    let mut test = NodeTest::new();
    let scripts = vec![InsertNode {
        path: vec![].into(),
        node_data: NodeData::new("text"),
        rev_id: 1,
    }];
    test.run_scripts(scripts);
}
#[test]
#[should_panic]
// Inserting at a path whose ancestors do not exist ([0, 0, 9] in an empty
// tree) must panic. The trailing AssertNode never executes — the panic is
// raised while the InsertNode script runs.
fn node_insert_with_not_exist_path_test() {
    let mut test = NodeTest::new();
    let node_data = NodeData::new("text");
    let path: Path = vec![0, 0, 9].into();
    let scripts = vec![
        InsertNode {
            path: path.clone(),
            node_data: node_data.clone(),
            rev_id: 1,
        },
        AssertNode {
            path,
            expected: Some(node_data),
        },
    ];
    test.run_scripts(scripts);
}
#[test]
// Append the node to the end of the list if the insert path is out of bounds.
// Inserting image_c at index 10 of a 2-element child list lands it at index 2,
// and the out-of-bounds path itself resolves to no node.
fn node_insert_out_of_bound_test() {
    let mut test = NodeTest::new();
    let image_a = NodeData::new("image_a");
    let image_b = NodeData::new("image_b");
    let image = NodeDataBuilder::new("image_1")
        .add_node_data(image_a.clone())
        .add_node_data(image_b.clone())
        .build();
    let text_node = NodeDataBuilder::new("text_1").add_node_data(image.clone()).build();
    let image_c = NodeData::new("image_c");
    let scripts = vec![
        InsertNode {
            path: 0.into(),
            node_data: text_node,
            rev_id: 1,
        },
        // 0:text_1
        //   0:image_1
        //     0:image_a
        //     1:image_b
        InsertNode {
            path: vec![0, 0, 10].into(),
            node_data: image_c.clone(),
            rev_id: 2,
        },
        // 0:text_1
        //   0:image_1
        //     0:image_a
        //     1:image_b
        //     2:image_c
        AssertNode {
            path: vec![0, 0, 2].into(),
            expected: Some(image_c),
        },
        AssertNode {
            path: vec![0, 0, 10].into(),
            expected: None,
        },
    ];
    test.run_scripts(scripts);
}
#[test]
// A single InsertNodes operation can place several siblings at once; they end
// up as the root's children in list order.
fn tree_insert_multiple_nodes_at_root_path_test() {
    let mut test = NodeTest::new();
    let node_1 = NodeData::new("a");
    let node_2 = NodeData::new("b");
    let node_3 = NodeData::new("c");
    let node_data_list = vec![node_1, node_2, node_3];
    let path: Path = vec![0].into();
    // Insert three nodes under the root
    let scripts = vec![
        // 0:a
        // 1:b
        // 2:c
        InsertNodes {
            path,
            node_data_list: node_data_list.clone(),
            rev_id: 1,
        },
        AssertNodesAtRoot {
            expected: node_data_list,
        },
    ];
    test.run_scripts(scripts);
}
#[test]
fn tree_insert_multiple_nodes_at_root_path_test2() {
let mut test = NodeTest::new();
let node_1 = NodeData::new("a");
let node_2 = NodeData::new("b");
let node_3 = NodeData::new("c");
let scripts = vec![
InsertNode {
path: 0.into(),
node_data: node_1.clone(),
rev_id: 1,
},
InsertNode {
path: 1.into(),
node_data: node_2.clone(),
rev_id: 2,
},
InsertNode {
path: 2.into(),
node_data: node_3.clone(),
rev_id: 3,
},
// 0:a
// 1:b
// 2:c
AssertNode {
path: 0.into(),
expected: Some(node_1),
},
AssertNode {
path: 1.into(),
expected: Some(node_2),
},
AssertNode {
path: 2.into(),
expected: Some(node_3),
},
];
test.run_scripts(scripts);
@ -28,61 +172,40 @@ fn node_insert_test() {
#[test]
fn node_insert_node_with_children_test() {
let mut test = NodeTest::new();
let inserted_node = NodeDataBuilder::new("text").add_node(NodeData::new("image")).build();
let image_1 = NodeData::new("image_a");
let image_2 = NodeData::new("image_b");
let image = NodeDataBuilder::new("image")
.add_node_data(image_1.clone())
.add_node_data(image_2.clone())
.build();
let node_data = NodeDataBuilder::new("text").add_node_data(image.clone()).build();
let path: Path = 0.into();
let scripts = vec![
InsertNode {
path: path.clone(),
node_data: inserted_node.clone(),
node_data: node_data.clone(),
rev_id: 1,
},
AssertNodeData {
// 0:text
// 0:image
// 0:image_1
// 1:image_2
AssertNode {
path,
expected: Some(inserted_node),
expected: Some(node_data),
},
];
test.run_scripts(scripts);
}
#[test]
fn node_insert_multi_nodes_test() {
let mut test = NodeTest::new();
let path_1: Path = 0.into();
let node_1 = NodeData::new("text_1");
let path_2: Path = 1.into();
let node_2 = NodeData::new("text_2");
let path_3: Path = 2.into();
let node_3 = NodeData::new("text_3");
let scripts = vec![
InsertNode {
path: path_1.clone(),
node_data: node_1.clone(),
rev_id: 1,
AssertNode {
path: vec![0, 0].into(),
expected: Some(image),
},
InsertNode {
path: path_2.clone(),
node_data: node_2.clone(),
rev_id: 2,
AssertNode {
path: vec![0, 0, 0].into(),
expected: Some(image_1),
},
InsertNode {
path: path_3.clone(),
node_data: node_3.clone(),
rev_id: 3,
},
AssertNodeData {
path: path_1,
expected: Some(node_1),
},
AssertNodeData {
path: path_2,
expected: Some(node_2),
},
AssertNodeData {
path: path_3,
expected: Some(node_3),
AssertNode {
path: vec![0, 0, 1].into(),
expected: Some(image_2),
},
];
test.run_scripts(scripts);
@ -129,19 +252,22 @@ fn node_insert_node_in_ordered_nodes_test() {
// 1:text_2_2
// 2:text_2_1
// 3:text_3
AssertNodeData {
AssertNode {
path: path_1,
expected: Some(node_1),
},
AssertNodeData {
AssertNode {
path: path_2,
expected: Some(node_2_2),
},
AssertNodeData {
AssertNode {
path: path_3,
expected: Some(node_2_1),
},
AssertNumberOfNodesAtPath { path: None, len: 4 },
AssertNumberOfChildrenAtPath {
path: None,
expected: 4,
},
];
test.run_scripts(scripts);
}
@ -152,15 +278,15 @@ fn node_insert_nested_nodes_test() {
let node_data_1_1 = NodeDataBuilder::new("text_1_1").build();
let node_data_1_2 = NodeDataBuilder::new("text_1_2").build();
let node_data_1 = NodeDataBuilder::new("text_1")
.add_node(node_data_1_1.clone())
.add_node(node_data_1_2.clone())
.add_node_data(node_data_1_1.clone())
.add_node_data(node_data_1_2.clone())
.build();
let node_data_2_1 = NodeDataBuilder::new("text_2_1").build();
let node_data_2_2 = NodeDataBuilder::new("text_2_2").build();
let node_data_2 = NodeDataBuilder::new("text_2")
.add_node(node_data_2_1.clone())
.add_node(node_data_2_2.clone())
.add_node_data(node_data_2_1.clone())
.add_node_data(node_data_2_2.clone())
.build();
let scripts = vec![
@ -207,8 +333,8 @@ fn node_insert_node_before_existing_nested_nodes_test() {
let node_data_1_1 = NodeDataBuilder::new("text_1_1").build();
let node_data_1_2 = NodeDataBuilder::new("text_1_2").build();
let node_data_1 = NodeDataBuilder::new("text_1")
.add_node(node_data_1_1.clone())
.add_node(node_data_1_2.clone())
.add_node_data(node_data_1_1.clone())
.add_node_data(node_data_1_2.clone())
.build();
let scripts = vec![
@ -258,7 +384,7 @@ fn node_insert_with_attributes_test() {
path: path.clone(),
attributes: inserted_node.attributes.clone(),
},
AssertNodeData {
AssertNode {
path,
expected: Some(inserted_node),
},
@ -270,7 +396,6 @@ fn node_insert_with_attributes_test() {
fn node_delete_test() {
let mut test = NodeTest::new();
let inserted_node = NodeData::new("text");
let path: Path = 0.into();
let scripts = vec![
InsertNode {
@ -282,7 +407,314 @@ fn node_delete_test() {
path: path.clone(),
rev_id: 2,
},
AssertNodeData { path, expected: None },
AssertNode { path, expected: None },
];
test.run_scripts(scripts);
}
#[test]
// Deleting the first root child shifts its sibling (and that sibling's whole
// subtree) down to index 0; the old index 1 becomes empty.
fn node_delete_node_from_list_test() {
    let mut test = NodeTest::new();
    let image_a = NodeData::new("image_a");
    let image_b = NodeData::new("image_b");
    let image_1 = NodeDataBuilder::new("image_1")
        .add_node_data(image_a.clone())
        .add_node_data(image_b.clone())
        .build();
    let text_node_1 = NodeDataBuilder::new("text_1").add_node_data(image_1.clone()).build();
    let image_2 = NodeDataBuilder::new("image_2")
        .add_node_data(image_a.clone())
        .add_node_data(image_b.clone())
        .build();
    let text_node_2 = NodeDataBuilder::new("text_2").add_node_data(image_2.clone()).build();
    let scripts = vec![
        InsertNode {
            path: 0.into(),
            node_data: text_node_1,
            rev_id: 1,
        },
        InsertNode {
            path: 1.into(),
            node_data: text_node_2.clone(),
            rev_id: 1,
        },
        // Remove text_1; text_2 moves from index 1 to index 0.
        DeleteNode {
            path: 0.into(),
            rev_id: 2,
        },
        AssertNode {
            path: 1.into(),
            expected: None,
        },
        AssertNode {
            path: 0.into(),
            expected: Some(text_node_2),
        },
        // text_2's subtree is carried along intact.
        AssertNode {
            path: vec![0, 0].into(),
            expected: Some(image_2),
        },
    ];
    test.run_scripts(scripts);
}
#[test]
// Deleting deeply nested nodes only affects the targeted subtree: siblings
// shift within their own parent, and unrelated branches stay untouched.
fn node_delete_nested_node_test() {
    let mut test = NodeTest::new();
    let image_a = NodeData::new("image_a");
    let image_b = NodeData::new("image_b");
    let image_1 = NodeDataBuilder::new("image_1")
        .add_node_data(image_a.clone())
        .add_node_data(image_b.clone())
        .build();
    let text_node_1 = NodeDataBuilder::new("text_1").add_node_data(image_1.clone()).build();
    let image_2 = NodeDataBuilder::new("image_2")
        .add_node_data(image_a.clone())
        .add_node_data(image_b.clone())
        .build();
    let text_node_2 = NodeDataBuilder::new("text_2").add_node_data(image_2.clone()).build();
    let scripts = vec![
        InsertNode {
            path: 0.into(),
            node_data: text_node_1,
            rev_id: 1,
        },
        InsertNode {
            path: 1.into(),
            node_data: text_node_2.clone(),
            rev_id: 1,
        },
        // 0:text_1
        //   0:image_1
        //     0:image_a
        //     1:image_b
        // 1:text_2
        //   0:image_2
        //     0:image_a
        //     1:image_b
        DeleteNode {
            path: vec![0, 0, 0].into(),
            rev_id: 2,
        },
        // 0:text_1
        //   0:image_1
        //     0:image_b
        // 1:text_2
        //   0:image_2
        //     0:image_a
        //     1:image_b
        AssertNode {
            path: vec![0, 0, 0].into(),
            expected: Some(image_b.clone()),
        },
        DeleteNode {
            path: vec![0, 0].into(),
            rev_id: 3,
        },
        // 0:text_1
        // 1:text_2
        //   0:image_2
        //     0:image_a
        //     1:image_b
        AssertNumberOfChildrenAtPath {
            path: Some(0.into()),
            expected: 0,
        },
        AssertNode {
            path: vec![0].into(),
            expected: Some(NodeDataBuilder::new("text_1").build()),
        },
        // text_2's branch is untouched by deletions inside text_1.
        AssertNode {
            path: vec![1, 0, 0].into(),
            expected: Some(image_a.clone()),
        },
        AssertNode {
            path: vec![1, 0, 1].into(),
            expected: Some(image_b.clone()),
        },
    ];
    test.run_scripts(scripts);
}
#[test]
// Deleting the first child of a node shifts its remaining siblings left and
// decrements the parent's child count.
fn node_delete_children_test() {
    let mut test = NodeTest::new();
    let inserted_node = NodeDataBuilder::new("text")
        .add_node_data(NodeDataBuilder::new("sub_text_1").build())
        .add_node_data(NodeDataBuilder::new("sub_text_2").build())
        .add_node_data(NodeDataBuilder::new("sub_text_3").build())
        .build();
    let scripts = vec![
        InsertNode {
            path: vec![0].into(),
            node_data: inserted_node,
            rev_id: 1,
        },
        AssertNode {
            path: vec![0, 0].into(),
            expected: Some(NodeDataBuilder::new("sub_text_1").build()),
        },
        AssertNode {
            path: vec![0, 1].into(),
            expected: Some(NodeDataBuilder::new("sub_text_2").build()),
        },
        AssertNode {
            path: vec![0, 2].into(),
            expected: Some(NodeDataBuilder::new("sub_text_3").build()),
        },
        AssertNumberOfChildrenAtPath {
            path: Some(Path(vec![0])),
            expected: 3,
        },
        // Remove sub_text_1; sub_text_2 becomes the first child.
        DeleteNode {
            path: vec![0, 0].into(),
            rev_id: 2,
        },
        AssertNode {
            path: vec![0, 0].into(),
            expected: Some(NodeDataBuilder::new("sub_text_2").build()),
        },
        AssertNumberOfChildrenAtPath {
            path: Some(Path(vec![0])),
            expected: 2,
        },
    ];
    test.run_scripts(scripts);
}
#[test]
// Reorders two siblings inside a nested child list via delete + re-insert:
// image_a is removed from the front and appended after image_b.
fn node_reorder_sub_nodes_test() {
    let mut test = NodeTest::new();
    let image_a = NodeData::new("image_a");
    let image_b = NodeData::new("image_b");
    let child_1 = NodeDataBuilder::new("image_1")
        .add_node_data(image_a.clone())
        .add_node_data(image_b.clone())
        .build();
    let text_node_1 = NodeDataBuilder::new("text_1").add_node_data(child_1.clone()).build();
    let scripts = vec![
        InsertNode {
            path: 0.into(),
            node_data: text_node_1,
            rev_id: 1,
        },
        // 0:text_1
        //   0:image_1
        //     0:image_a
        //     1:image_b
        DeleteNode {
            path: vec![0, 0, 0].into(),
            rev_id: 2,
        },
        // 0:text_1
        //   0:image_1
        //     0:image_b
        InsertNode {
            path: vec![0, 0, 1].into(),
            node_data: image_a.clone(),
            rev_id: 3,
        },
        // 0:text_1
        //   0:image_1
        //     0:image_b
        //     1:image_a
        AssertNode {
            path: vec![0, 0, 0].into(),
            expected: Some(image_b.clone()),
        },
        AssertNode {
            path: vec![0, 0, 1].into(),
            expected: Some(image_a.clone()),
        },
    ];
    test.run_scripts(scripts);
}
#[test]
// Reorders two whole root subtrees via delete + re-insert: text_1 (with its
// nested children) is moved from index 0 to the end, after text_2.
fn node_reorder_nodes_test() {
    let mut test = NodeTest::new();
    let image_a = NodeData::new("image_a");
    let image_b = NodeData::new("image_b");
    let image_1 = NodeDataBuilder::new("image_1")
        .add_node_data(image_a.clone())
        .add_node_data(image_b.clone())
        .build();
    let text_node_1 = NodeDataBuilder::new("text_1").add_node_data(image_1.clone()).build();
    let image_2 = NodeDataBuilder::new("image_2")
        .add_node_data(image_a.clone())
        .add_node_data(image_b.clone())
        .build();
    let text_node_2 = NodeDataBuilder::new("text_2").add_node_data(image_2.clone()).build();
    let scripts = vec![
        InsertNode {
            path: 0.into(),
            node_data: text_node_1.clone(),
            rev_id: 1,
        },
        // text_2 is inserted at index 0, pushing text_1 to index 1.
        InsertNode {
            path: 0.into(),
            node_data: text_node_2.clone(),
            rev_id: 1,
        },
        // 0:text_1
        //   0:image_1
        //     0:image_a
        //     1:image_b
        // 1:text_2
        //   0:image_2
        //     0:image_a
        //     1:image_b
        DeleteNode {
            path: vec![0].into(),
            rev_id: 2,
        },
        InsertNode {
            path: vec![1].into(),
            node_data: text_node_1.clone(),
            rev_id: 3,
        },
        // 0:text_2
        //   0:image_2
        //     0:image_a
        //     1:image_b
        // 1:text_1
        //   0:image_1
        //     0:image_a
        //     1:image_b
        AssertNode {
            path: vec![0].into(),
            expected: Some(text_node_2.clone()),
        },
        AssertNode {
            path: vec![0, 0].into(),
            expected: Some(image_2.clone()),
        },
        AssertNode {
            path: vec![0, 0, 0].into(),
            expected: Some(image_a.clone()),
        },
        AssertNode {
            path: vec![1].into(),
            expected: Some(text_node_1.clone()),
        },
        AssertNode {
            path: vec![1, 0].into(),
            expected: Some(image_1.clone()),
        },
        AssertNode {
            path: vec![1, 0, 1].into(),
            expected: Some(image_b.clone()),
        },
    ];
    test.run_scripts(scripts);
}
@ -290,29 +722,79 @@ fn node_delete_test() {
#[test]
fn node_update_body_test() {
let mut test = NodeTest::new();
let path: Path = 0.into();
let s = "Hello".to_owned();
let init_delta = TextOperationBuilder::new().insert(&s).build();
let delta = TextOperationBuilder::new().retain(s.len()).insert(" AppFlowy").build();
let inverted = delta.invert(&init_delta);
let expected = init_delta.compose(&delta).unwrap();
let (initial_delta, changeset, _, expected) = make_node_delta_changeset("Hello", "AppFlowy");
let node = NodeDataBuilder::new("text")
.insert_body(NodeBody::Delta(init_delta))
.insert_body(Body::Delta(initial_delta))
.build();
let scripts = vec![
InsertNode {
path: path.clone(),
path: 0.into(),
node_data: node,
rev_id: 1,
},
UpdateBody {
path: path.clone(),
changeset: NodeBodyChangeset::Delta { delta, inverted },
path: 0.into(),
changeset,
},
AssertNodeDelta {
path: 0.into(),
expected,
},
AssertNodeDelta { path, expected },
];
test.run_scripts(scripts);
}
#[test]
// Applying a body changeset followed by its inverse must restore the node's
// original delta — the undo path of UpdateBody.
fn node_inverted_body_changeset_test() {
    let mut test = NodeTest::new();
    let (initial_delta, changeset, inverted_changeset, _expected) = make_node_delta_changeset("Hello", "AppFlowy");
    let node = NodeDataBuilder::new("text")
        .insert_body(Body::Delta(initial_delta.clone()))
        .build();
    let scripts = vec![
        InsertNode {
            path: 0.into(),
            node_data: node,
            rev_id: 1,
        },
        // Apply the edit…
        UpdateBody {
            path: 0.into(),
            changeset,
        },
        // …then undo it.
        UpdateBody {
            path: 0.into(),
            changeset: inverted_changeset,
        },
        AssertNodeDelta {
            path: 0.into(),
            expected: initial_delta,
        },
    ];
    test.run_scripts(scripts);
}
/// Test fixture: builds a delta for `initial_content`, an edit that appends
/// `insert_str` to it, and the forward/inverse changesets for that edit.
///
/// Returns `(initial_delta, changeset, inverted_changeset, expected)` where
/// `expected` is the document after applying the edit.
fn make_node_delta_changeset(
    initial_content: &str,
    insert_str: &str,
) -> (TextOperations, Changeset, Changeset, TextOperations) {
    // The "document before the edit": a delta that inserts the content.
    let initial_delta = TextOperationBuilder::new().insert(initial_content).build();
    // The edit keeps the existing text and appends `insert_str` after it.
    let delta = TextOperationBuilder::new()
        .retain(initial_content.len())
        .insert(insert_str)
        .build();
    let inverted = delta.invert(&initial_delta);
    // Document state after the edit is applied.
    let expected = initial_delta.compose(&delta).unwrap();
    // Forward changeset applies the edit; the inverted one undoes it.
    let changeset = Changeset::Delta {
        delta: delta.clone(),
        inverted: inverted.clone(),
    };
    let inverted_changeset = Changeset::Delta {
        delta: inverted,
        inverted: delta,
    };
    (initial_delta, changeset, inverted_changeset, expected)
}