chore: rename crates

appflowy
2022-03-19 16:52:28 +08:00
parent b1d87d95cf
commit 6a3820253f
166 changed files with 554 additions and 772 deletions

@@ -0,0 +1,36 @@
[package]
name = "flowy-sync"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
lib-ot = { path = "../lib-ot" }
lib-infra = { path = "../lib-infra" }
flowy-derive = { path = "../flowy-derive" }
flowy-folder-data-model = { path = "../flowy-folder-data-model" }
flowy-grid-data-model = { path = "../flowy-grid-data-model" }
protobuf = {version = "2.18.0"}
bytes = "1.0"
log = "0.4.14"
md5 = "0.7.0"
tokio = { version = "1", features = ["full"] }
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = {version = "1.0"}
dissimilar = "1.0"
tracing = { version = "0.1", features = ["log"] }
url = "2.2"
strum = "0.21"
strum_macros = "0.21"
chrono = "0.4.19"
parking_lot = "0.11"
dashmap = "4.0"
futures = "0.3.15"
async-stream = "0.3.2"
[build-dependencies]
lib-infra = { path = "../lib-infra", features = ["protobuf_file_gen"] }
[features]
dart = ["lib-infra/dart"]

@@ -0,0 +1,3 @@
proto_crates = ["src/entities"]
event_files = []

@@ -0,0 +1,5 @@
use lib_infra::code_gen;
fn main() {
code_gen::protobuf_file::gen(env!("CARGO_PKG_NAME"), "./src/protobuf/proto");
}

@@ -0,0 +1 @@
[{"insert":"\n👋 Welcome to AppFlowy!"},{"insert":"\n","attributes":{"header":1}},{"insert":"\nHere are the basics"},{"insert":"\n","attributes":{"header":2}},{"insert":"Click anywhere and just start typing"},{"insert":"\n","attributes":{"list":"unchecked"}},{"insert":"Highlight","attributes":{"background":"#fff2cd"}},{"insert":" any text, and use the menu at the bottom to "},{"insert":"style","attributes":{"italic":true}},{"insert":" "},{"insert":"your","attributes":{"bold":true}},{"insert":" "},{"insert":"writing","attributes":{"underline":true}},{"insert":" "},{"insert":"however","attributes":{"code":true}},{"insert":" "},{"insert":"you","attributes":{"strike":true}},{"insert":" "},{"insert":"like","attributes":{"background":"#e8e0ff"}},{"insert":"\n","attributes":{"list":"unchecked"}},{"insert":"Click "},{"insert":"+ New Page","attributes":{"background":"#defff1","bold":true}},{"insert":" button at the bottom of your sidebar to add a new page"},{"insert":"\n","attributes":{"list":"unchecked"}},{"insert":"Click the "},{"insert":"'","attributes":{"background":"#defff1"}},{"insert":"+'","attributes":{"background":"#defff1","bold":true}},{"insert":" next to any page title in the sidebar to quickly add a new subpage"},{"insert":"\n","attributes":{"list":"unchecked"}},{"insert":"\nHave a question? "},{"insert":"\n","attributes":{"header":2}},{"insert":"Click the "},{"insert":"'?'","attributes":{"background":"#defff1","bold":true}},{"insert":" at the bottom right for help and support.\n\nLike AppFlowy? Follow us:"},{"insert":"\n","attributes":{"header":2}},{"insert":"GitHub: https://github.com/AppFlowy-IO/appflowy"},{"insert":"\n","attributes":{"blockquote":true}},{"insert":"Twitter: https://twitter.com/appflowy"},{"insert":"\n","attributes":{"blockquote":true}},{"insert":"Newsletter: https://www.appflowy.io/blog"},{"insert":"\n","attributes":{"blockquote":true}}]

@@ -0,0 +1,12 @@
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug)]
pub struct ImageData {
image: String,
}
impl ToString for ImageData {
fn to_string(&self) -> String {
self.image.clone()
}
}
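A minimal round-trip sketch for the ImageData type above. It assumes it lives in the same module as the struct (the image field is private) and uses only the serde derives shown; the URL value is illustrative.

fn image_data_round_trip() {
    // Construct an ImageData inside the defining module; the value is made up.
    let data = ImageData { image: "https://example.com/cat.png".to_string() };
    // serde_json is already a dependency of this crate (see the Cargo.toml above).
    let json = serde_json::to_string(&data).unwrap();
    let decoded: ImageData = serde_json::from_str(&json).unwrap();
    // ImageData does not derive PartialEq, so compare through ToString.
    assert_eq!(decoded.to_string(), data.to_string());
}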

@@ -0,0 +1 @@
[{"insert":"\n👋 Welcome to AppFlowy!"},{"insert":"\n","attributes":{"header":1}},{"insert":"\nHere are the basics"},{"insert":"\n","attributes":{"header":2}},{"insert":"Click anywhere and just start typing"},{"insert":"\n","attributes":{"list":"unchecked"}},{"insert":"Highlight","attributes":{"background":"#fff2cd"}},{"insert":" any text, and use the menu at the bottom to "},{"insert":"style","attributes":{"italic":true}},{"insert":" "},{"insert":"your","attributes":{"bold":true}},{"insert":" "},{"insert":"writing","attributes":{"underline":true}},{"insert":" "},{"insert":"however","attributes":{"code":true}},{"insert":" "},{"insert":"you","attributes":{"strike":true}},{"insert":" "},{"insert":"like","attributes":{"background":"#e8e0ff"}},{"insert":"\n","attributes":{"list":"unchecked"}},{"insert":"Click "},{"insert":"+ New Page","attributes":{"background":"#defff1","bold":true}},{"insert":" button at the bottom of your sidebar to add a new page"},{"insert":"\n","attributes":{"list":"unchecked"}},{"insert":"Click the "},{"insert":"'","attributes":{"background":"#defff1"}},{"insert":"+'","attributes":{"background":"#defff1","bold":true}},{"insert":" next to any page title in the sidebar to quickly add a new subpage"},{"insert":"\n","attributes":{"list":"unchecked"}},{"insert":"\nHave a question? "},{"insert":"\n","attributes":{"header":2}},{"insert":"Click the "},{"insert":"'?'","attributes":{"background":"#defff1","bold":true}},{"insert":" at the bottom right for help and support.\n\nLike AppFlowy? Follow us:"},{"insert":"\n","attributes":{"header":2}},{"insert":"GitHub: https://github.com/AppFlowy-IO/appflowy"},{"insert":"\n","attributes":{"blockquote":true}},{"insert":"Twitter: https://twitter.com/appflowy"},{"insert":"\n","attributes":{"blockquote":true}},{"insert":"Newsletter: https://www.appflowy.io/blog"},{"insert":"\n","attributes":{"blockquote":true}}]

@@ -0,0 +1,27 @@
use lib_ot::{core::DeltaBuilder, rich_text::RichTextDelta};
#[inline]
pub fn initial_quill_delta() -> RichTextDelta {
DeltaBuilder::new().insert("\n").build()
}
#[inline]
pub fn initial_quill_delta_string() -> String {
initial_quill_delta().to_delta_str()
}
#[inline]
pub fn initial_read_me() -> RichTextDelta {
let json = include_str!("READ_ME.json");
RichTextDelta::from_delta_str(json).unwrap()
}
#[cfg(test)]
mod tests {
use crate::client_document::default::initial_read_me;
#[test]
fn load_read_me() {
println!("{}", initial_read_me().to_delta_str());
}
}

@@ -0,0 +1,228 @@
use crate::{
client_document::{
default::initial_quill_delta,
history::{History, UndoResult},
view::{ViewExtensions, RECORD_THRESHOLD},
},
errors::CollaborateError,
};
use bytes::Bytes;
use lib_ot::{
core::*,
rich_text::{RichTextAttribute, RichTextDelta},
};
use tokio::sync::mpsc;
pub trait InitialDocumentText {
fn initial_delta() -> RichTextDelta;
}
pub struct PlainDoc();
impl InitialDocumentText for PlainDoc {
fn initial_delta() -> RichTextDelta {
RichTextDelta::new()
}
}
pub struct NewlineDoc();
impl InitialDocumentText for NewlineDoc {
fn initial_delta() -> RichTextDelta {
initial_quill_delta()
}
}
pub struct ClientDocument {
delta: RichTextDelta,
history: History,
view: ViewExtensions,
last_edit_time: usize,
notify: Option<mpsc::UnboundedSender<()>>,
}
impl ClientDocument {
pub fn new<C: InitialDocumentText>() -> Self {
Self::from_delta(C::initial_delta())
}
pub fn from_delta(delta: RichTextDelta) -> Self {
ClientDocument {
delta,
history: History::new(),
view: ViewExtensions::new(),
last_edit_time: 0,
notify: None,
}
}
pub fn from_json(json: &str) -> Result<Self, CollaborateError> {
let delta = RichTextDelta::from_delta_str(json)?;
Ok(Self::from_delta(delta))
}
pub fn delta_str(&self) -> String {
self.delta.to_delta_str()
}
pub fn to_bytes(&self) -> Bytes {
self.delta.to_delta_bytes()
}
pub fn to_plain_string(&self) -> String {
self.delta.apply("").unwrap()
}
pub fn delta(&self) -> &RichTextDelta {
&self.delta
}
pub fn md5(&self) -> String {
let bytes = self.to_bytes();
format!("{:x}", md5::compute(bytes))
}
pub fn set_notify(&mut self, notify: mpsc::UnboundedSender<()>) {
self.notify = Some(notify);
}
pub fn set_delta(&mut self, data: RichTextDelta) {
tracing::trace!("document: {}", data.to_delta_str());
self.delta = data;
match &self.notify {
None => {}
Some(notify) => {
let _ = notify.send(());
}
}
}
pub fn compose_delta(&mut self, delta: RichTextDelta) -> Result<(), CollaborateError> {
tracing::trace!("{} compose {}", &self.delta.to_delta_str(), delta.to_delta_str());
let composed_delta = self.delta.compose(&delta)?;
let mut undo_delta = delta.invert(&self.delta);
let now = chrono::Utc::now().timestamp_millis() as usize;
if now - self.last_edit_time < RECORD_THRESHOLD {
if let Some(last_delta) = self.history.undo() {
tracing::trace!("compose previous change");
tracing::trace!("current = {}", undo_delta);
tracing::trace!("previous = {}", last_delta);
undo_delta = undo_delta.compose(&last_delta)?;
}
} else {
self.last_edit_time = now;
}
if !undo_delta.is_empty() {
tracing::trace!("add history delta: {}", undo_delta);
self.history.record(undo_delta);
}
self.set_delta(composed_delta);
Ok(())
}
pub fn insert<T: ToString>(&mut self, index: usize, data: T) -> Result<RichTextDelta, CollaborateError> {
let text = data.to_string();
let interval = Interval::new(index, index);
let _ = validate_interval(&self.delta, &interval)?;
let delta = self.view.insert(&self.delta, &text, interval)?;
self.compose_delta(delta.clone())?;
Ok(delta)
}
pub fn delete(&mut self, interval: Interval) -> Result<RichTextDelta, CollaborateError> {
let _ = validate_interval(&self.delta, &interval)?;
debug_assert!(!interval.is_empty());
let delete = self.view.delete(&self.delta, interval)?;
if !delete.is_empty() {
let _ = self.compose_delta(delete.clone())?;
}
Ok(delete)
}
pub fn format(
&mut self,
interval: Interval,
attribute: RichTextAttribute,
) -> Result<RichTextDelta, CollaborateError> {
let _ = validate_interval(&self.delta, &interval)?;
tracing::trace!("format {} with {}", interval, attribute);
let format_delta = self.view.format(&self.delta, attribute, interval).unwrap();
self.compose_delta(format_delta.clone())?;
Ok(format_delta)
}
pub fn replace<T: ToString>(&mut self, interval: Interval, data: T) -> Result<RichTextDelta, CollaborateError> {
let _ = validate_interval(&self.delta, &interval)?;
let mut delta = RichTextDelta::default();
let text = data.to_string();
if !text.is_empty() {
delta = self.view.insert(&self.delta, &text, interval)?;
self.compose_delta(delta.clone())?;
}
if !interval.is_empty() {
let delete = self.delete(interval)?;
delta = delta.compose(&delete)?;
}
Ok(delta)
}
pub fn can_undo(&self) -> bool {
self.history.can_undo()
}
pub fn can_redo(&self) -> bool {
self.history.can_redo()
}
pub fn undo(&mut self) -> Result<UndoResult, CollaborateError> {
match self.history.undo() {
None => Err(CollaborateError::undo().context("Undo stack is empty")),
Some(undo_delta) => {
let (new_delta, inverted_delta) = self.invert(&undo_delta)?;
self.set_delta(new_delta);
self.history.add_redo(inverted_delta);
Ok(UndoResult { delta: undo_delta })
}
}
}
pub fn redo(&mut self) -> Result<UndoResult, CollaborateError> {
match self.history.redo() {
None => Err(CollaborateError::redo()),
Some(redo_delta) => {
let (new_delta, inverted_delta) = self.invert(&redo_delta)?;
self.set_delta(new_delta);
self.history.add_undo(inverted_delta);
Ok(UndoResult { delta: redo_delta })
}
}
}
pub fn is_empty(&self) -> bool {
// The document is empty if its text is equal to the initial text.
self.delta == NewlineDoc::initial_delta()
}
}
impl ClientDocument {
fn invert(&self, delta: &RichTextDelta) -> Result<(RichTextDelta, RichTextDelta), CollaborateError> {
// c = a.compose(b)
// d = b.invert(a)
// a = c.compose(d)
let new_delta = self.delta.compose(delta)?;
let inverted_delta = delta.invert(&self.delta);
Ok((new_delta, inverted_delta))
}
}
fn validate_interval(delta: &RichTextDelta, interval: &Interval) -> Result<(), CollaborateError> {
if delta.utf16_target_len < interval.end {
log::error!("{:?} out of bounds. should 0..{}", interval, delta.utf16_target_len);
return Err(CollaborateError::out_of_bound());
}
Ok(())
}
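A minimal usage sketch of the ClientDocument API defined above, assuming PlainDoc as the initial document and the Interval type re-exported from lib_ot::core; the text and indexes are illustrative.

fn client_document_sketch() -> Result<(), CollaborateError> {
    // Start from an empty document (PlainDoc has an empty initial delta).
    let mut doc = ClientDocument::new::<PlainDoc>();
    // insert runs through the view's insert extensions and records an undo entry.
    let _inserted = doc.insert(0, "hello")?;
    // Delete the first code unit; the interval must lie inside the document.
    let _deleted = doc.delete(Interval::new(0, 1))?;
    if doc.can_undo() {
        // undo pops the last recorded delta and pushes its inverse onto the redo stack.
        let _undo = doc.undo()?;
    }
    println!("{}", doc.delta_str());
    Ok(())
}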

@@ -0,0 +1,21 @@
use crate::client_document::DeleteExt;
use lib_ot::{
core::{DeltaBuilder, Interval},
rich_text::RichTextDelta,
};
pub struct DefaultDelete {}
impl DeleteExt for DefaultDelete {
fn ext_name(&self) -> &str {
"DefaultDelete"
}
fn apply(&self, _delta: &RichTextDelta, interval: Interval) -> Option<RichTextDelta> {
Some(
DeltaBuilder::new()
.retain(interval.start)
.delete(interval.size())
.build(),
)
}
}

@@ -0,0 +1,5 @@
mod default_delete;
mod preserve_line_format_merge;
pub use default_delete::*;
pub use preserve_line_format_merge::*;

@@ -0,0 +1,62 @@
use crate::{client_document::DeleteExt, util::is_newline};
use lib_ot::{
core::{Attributes, DeltaBuilder, DeltaIter, Interval, Utf16CodeUnitMetric, NEW_LINE},
rich_text::{plain_attributes, RichTextDelta},
};
pub struct PreserveLineFormatOnMerge {}
impl DeleteExt for PreserveLineFormatOnMerge {
fn ext_name(&self) -> &str {
"PreserveLineFormatOnMerge"
}
fn apply(&self, delta: &RichTextDelta, interval: Interval) -> Option<RichTextDelta> {
if interval.is_empty() {
return None;
}
// Seek to the interval's start position, e.g. the position where the backspace happened.
let mut iter = DeltaIter::from_offset(delta, interval.start);
// op will be the "\n"
let newline_op = iter.next_op_with_len(1)?;
if !is_newline(newline_op.get_data()) {
return None;
}
iter.seek::<Utf16CodeUnitMetric>(interval.size() - 1);
let mut new_delta = DeltaBuilder::new()
.retain(interval.start)
.delete(interval.size())
.build();
while iter.has_next() {
match iter.next() {
None => log::error!("op must not be None when has_next() returns true"),
Some(op) => {
//
match op.get_data().find(NEW_LINE) {
None => {
new_delta.retain(op.len(), plain_attributes());
continue;
}
Some(line_break) => {
let mut attributes = op.get_attributes();
attributes.mark_all_as_removed_except(None);
if newline_op.has_attribute() {
attributes.extend_other(newline_op.get_attributes());
}
new_delta.retain(line_break, plain_attributes());
new_delta.retain(1, attributes);
break;
}
}
}
}
}
Some(new_delta)
}
}

@@ -0,0 +1,48 @@
// use crate::{
// client::extensions::FormatExt,
// core::{Attribute, AttributeKey, Delta, DeltaBuilder, DeltaIter,
// Interval}, };
//
// pub struct FormatLinkAtCaretPositionExt {}
//
// impl FormatExt for FormatLinkAtCaretPositionExt {
// fn ext_name(&self) -> &str {
// std::any::type_name::<FormatLinkAtCaretPositionExt>() }
//
// fn apply(&self, delta: &Delta, interval: Interval, attribute: &Attribute)
// -> Option<Delta> { if attribute.key != AttributeKey::Link ||
// interval.size() != 0 { return None;
// }
//
// let mut iter = DeltaIter::from_offset(delta, interval.start);
// let (before, after) = (iter.next_op_with_len(interval.size()),
// iter.next_op()); let mut start = interval.end;
// let mut retain = 0;
//
// if let Some(before) = before {
// if before.contain_attribute(attribute) {
// start -= before.len();
// retain += before.len();
// }
// }
//
// if let Some(after) = after {
// if after.contain_attribute(attribute) {
// if retain != 0 {
// retain += after.len();
// }
// }
// }
//
// if retain == 0 {
// return None;
// }
//
// Some(
// DeltaBuilder::new()
// .retain(start)
// .retain_with_attributes(retain, (attribute.clone()).into())
// .build(),
// )
// }
// }

@@ -0,0 +1,7 @@
pub use format_at_position::*;
pub use resolve_block_format::*;
pub use resolve_inline_format::*;
mod format_at_position;
mod resolve_block_format;
mod resolve_inline_format;

@@ -0,0 +1,54 @@
use lib_ot::{
core::{DeltaBuilder, DeltaIter, Interval},
rich_text::{plain_attributes, AttributeScope, RichTextAttribute, RichTextDelta},
};
use crate::{
client_document::{extensions::helper::line_break, FormatExt},
util::find_newline,
};
pub struct ResolveBlockFormat {}
impl FormatExt for ResolveBlockFormat {
fn ext_name(&self) -> &str {
"ResolveBlockFormat"
}
fn apply(&self, delta: &RichTextDelta, interval: Interval, attribute: &RichTextAttribute) -> Option<RichTextDelta> {
if attribute.scope != AttributeScope::Block {
return None;
}
let mut new_delta = DeltaBuilder::new().retain(interval.start).build();
let mut iter = DeltaIter::from_offset(delta, interval.start);
let mut start = 0;
let end = interval.size();
while start < end && iter.has_next() {
let next_op = iter.next_op_with_len(end - start).unwrap();
match find_newline(next_op.get_data()) {
None => new_delta.retain(next_op.len(), plain_attributes()),
Some(_) => {
let tmp_delta = line_break(&next_op, attribute, AttributeScope::Block);
new_delta.extend(tmp_delta);
}
}
start += next_op.len();
}
while iter.has_next() {
let op = iter.next_op().expect("next_op must return an op when has_next() is true");
match find_newline(op.get_data()) {
None => new_delta.retain(op.len(), plain_attributes()),
Some(line_break) => {
new_delta.retain(line_break, plain_attributes());
new_delta.retain(1, attribute.clone().into());
break;
}
}
}
Some(new_delta)
}
}

@@ -0,0 +1,41 @@
use lib_ot::{
core::{DeltaBuilder, DeltaIter, Interval},
rich_text::{AttributeScope, RichTextAttribute, RichTextDelta},
};
use crate::{
client_document::{extensions::helper::line_break, FormatExt},
util::find_newline,
};
pub struct ResolveInlineFormat {}
impl FormatExt for ResolveInlineFormat {
fn ext_name(&self) -> &str {
"ResolveInlineFormat"
}
fn apply(&self, delta: &RichTextDelta, interval: Interval, attribute: &RichTextAttribute) -> Option<RichTextDelta> {
if attribute.scope != AttributeScope::Inline {
return None;
}
let mut new_delta = DeltaBuilder::new().retain(interval.start).build();
let mut iter = DeltaIter::from_offset(delta, interval.start);
let mut start = 0;
let end = interval.size();
while start < end && iter.has_next() {
let next_op = iter.next_op_with_len(end - start).unwrap();
match find_newline(next_op.get_data()) {
None => new_delta.retain(next_op.len(), attribute.clone().into()),
Some(_) => {
let tmp_delta = line_break(&next_op, attribute, AttributeScope::Inline);
new_delta.extend(tmp_delta);
}
}
start += next_op.len();
}
Some(new_delta)
}
}

@@ -0,0 +1,41 @@
use crate::util::find_newline;
use lib_ot::rich_text::{plain_attributes, AttributeScope, RichTextAttribute, RichTextDelta, RichTextOperation};
pub(crate) fn line_break(
op: &RichTextOperation,
attribute: &RichTextAttribute,
scope: AttributeScope,
) -> RichTextDelta {
let mut new_delta = RichTextDelta::new();
let mut start = 0;
let end = op.len();
let mut s = op.get_data();
while let Some(line_break) = find_newline(s) {
match scope {
AttributeScope::Inline => {
new_delta.retain(line_break - start, attribute.clone().into());
new_delta.retain(1, plain_attributes());
}
AttributeScope::Block => {
new_delta.retain(line_break - start, plain_attributes());
new_delta.retain(1, attribute.clone().into());
}
_ => {
log::error!("Unsupported attribute scope for line break: {:?}", scope);
}
}
start = line_break + 1;
s = &s[start..s.len()];
}
if start < end {
match scope {
AttributeScope::Inline => new_delta.retain(end - start, attribute.clone().into()),
AttributeScope::Block => new_delta.retain(end - start, plain_attributes()),
_ => log::error!("Unsupported attribute scope for line break: {:?}", scope),
}
}
new_delta
}

@@ -0,0 +1,56 @@
use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::{
core::{is_empty_line_at_index, DeltaBuilder, DeltaIter},
rich_text::{attributes_except_header, RichTextAttributeKey, RichTextDelta},
};
pub struct AutoExitBlock {}
impl InsertExt for AutoExitBlock {
fn ext_name(&self) -> &str {
"AutoExitBlock"
}
fn apply(&self, delta: &RichTextDelta, replace_len: usize, text: &str, index: usize) -> Option<RichTextDelta> {
// Auto-exit-block is triggered by entering two consecutive newlines.
if !is_newline(text) {
return None;
}
if !is_empty_line_at_index(delta, index) {
return None;
}
let mut iter = DeltaIter::from_offset(delta, index);
let next = iter.next_op()?;
let mut attributes = next.get_attributes();
let block_attributes = attributes_except_header(&next);
if block_attributes.is_empty() {
return None;
}
if next.len() > 1 {
return None;
}
match iter.next_op_with_newline() {
None => {}
Some((newline_op, _)) => {
let newline_attributes = attributes_except_header(&newline_op);
if block_attributes == newline_attributes {
return None;
}
}
}
attributes.mark_all_as_removed_except(Some(RichTextAttributeKey::Header));
Some(
DeltaBuilder::new()
.retain(index + replace_len)
.retain_with_attributes(1, attributes)
.build(),
)
}
}

@@ -0,0 +1,87 @@
use crate::{client_document::InsertExt, util::is_whitespace};
use lib_ot::{
core::{count_utf16_code_units, DeltaBuilder, DeltaIter},
rich_text::{plain_attributes, RichTextAttribute, RichTextAttributes, RichTextDelta},
};
use std::cmp::min;
use url::Url;
pub struct AutoFormatExt {}
impl InsertExt for AutoFormatExt {
fn ext_name(&self) -> &str {
"AutoFormatExt"
}
fn apply(&self, delta: &RichTextDelta, replace_len: usize, text: &str, index: usize) -> Option<RichTextDelta> {
// Entering whitespace triggers the auto format.
if !is_whitespace(text) {
return None;
}
let mut iter = DeltaIter::new(delta);
if let Some(prev) = iter.next_op_with_len(index) {
match AutoFormat::parse(prev.get_data()) {
None => {}
Some(formatter) => {
let mut new_attributes = prev.get_attributes();
// format_len should not be greater than index. The url crate appends "/" to the
// end of the input string, which can make format_len greater than the input string's length.
let format_len = min(index, formatter.format_len());
let format_attributes = formatter.to_attributes();
format_attributes.iter().for_each(|(k, v)| {
if !new_attributes.contains_key(k) {
new_attributes.insert(k.clone(), v.clone());
}
});
let next_attributes = match iter.next_op() {
None => plain_attributes(),
Some(op) => op.get_attributes(),
};
return Some(
DeltaBuilder::new()
.retain(index + replace_len - min(index, format_len))
.retain_with_attributes(format_len, format_attributes)
.insert_with_attributes(text, next_attributes)
.build(),
);
}
}
}
None
}
}
pub enum AutoFormatter {
Url(Url),
}
impl AutoFormatter {
pub fn to_attributes(&self) -> RichTextAttributes {
match self {
AutoFormatter::Url(url) => RichTextAttribute::Link(url.as_str()).into(),
}
}
pub fn format_len(&self) -> usize {
let s = match self {
AutoFormatter::Url(url) => url.to_string(),
};
count_utf16_code_units(&s)
}
}
pub struct AutoFormat {}
impl AutoFormat {
fn parse(s: &str) -> Option<AutoFormatter> {
if let Ok(url) = Url::parse(s) {
return Some(AutoFormatter::Url(url));
}
None
}
}
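The min(index, format_len) guard in AutoFormatExt::apply exists because Url::parse normalizes its input. A small sketch of that behavior; the URL value is illustrative.

fn url_normalization_sketch() {
    // The url crate appends a trailing "/" when the path is empty, so the
    // formatted string can be one code unit longer than what was typed.
    let typed = "https://appflowy.io";
    let parsed = Url::parse(typed).unwrap();
    assert_eq!(parsed.to_string(), "https://appflowy.io/");
}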

@@ -0,0 +1,40 @@
use crate::client_document::InsertExt;
use lib_ot::{
core::{Attributes, DeltaBuilder, DeltaIter, NEW_LINE},
rich_text::{RichTextAttributeKey, RichTextAttributes, RichTextDelta},
};
pub struct DefaultInsertAttribute {}
impl InsertExt for DefaultInsertAttribute {
fn ext_name(&self) -> &str {
"DefaultInsertAttribute"
}
fn apply(&self, delta: &RichTextDelta, replace_len: usize, text: &str, index: usize) -> Option<RichTextDelta> {
let iter = DeltaIter::new(delta);
let mut attributes = RichTextAttributes::new();
// Ensure that each line split by "\n" keeps the block attributes. For example:
// insert "\n" to "123456" at index 3
//
// [{"insert":"123"},{"insert":"\n","attributes":{"header":1}},
// {"insert":"456"},{"insert":"\n","attributes":{"header":1}}]
if text.ends_with(NEW_LINE) {
match iter.last() {
None => {}
Some(op) => {
if op.get_attributes().contains_key(&RichTextAttributeKey::Header) {
attributes.extend_other(op.get_attributes());
}
}
}
}
Some(
DeltaBuilder::new()
.retain(index + replace_len)
.insert_with_attributes(text, attributes)
.build(),
)
}
}

@@ -0,0 +1,37 @@
use crate::client_document::InsertExt;
pub use auto_exit_block::*;
pub use auto_format::*;
pub use default_insert::*;
use lib_ot::rich_text::RichTextDelta;
pub use preserve_block_format::*;
pub use preserve_inline_format::*;
pub use reset_format_on_new_line::*;
mod auto_exit_block;
mod auto_format;
mod default_insert;
mod preserve_block_format;
mod preserve_inline_format;
mod reset_format_on_new_line;
pub struct InsertEmbedsExt {}
impl InsertExt for InsertEmbedsExt {
fn ext_name(&self) -> &str {
"InsertEmbedsExt"
}
fn apply(&self, _delta: &RichTextDelta, _replace_len: usize, _text: &str, _index: usize) -> Option<RichTextDelta> {
None
}
}
pub struct ForceNewlineForInsertsAroundEmbedExt {}
impl InsertExt for ForceNewlineForInsertsAroundEmbedExt {
fn ext_name(&self) -> &str {
"ForceNewlineForInsertsAroundEmbedExt"
}
fn apply(&self, _delta: &RichTextDelta, _replace_len: usize, _text: &str, _index: usize) -> Option<RichTextDelta> {
None
}
}

@@ -0,0 +1,64 @@
use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::{
core::{DeltaBuilder, DeltaIter, NEW_LINE},
rich_text::{
attributes_except_header, plain_attributes, RichTextAttribute, RichTextAttributeKey, RichTextAttributes,
RichTextDelta,
},
};
pub struct PreserveBlockFormatOnInsert {}
impl InsertExt for PreserveBlockFormatOnInsert {
fn ext_name(&self) -> &str {
"PreserveBlockFormatOnInsert"
}
fn apply(&self, delta: &RichTextDelta, replace_len: usize, text: &str, index: usize) -> Option<RichTextDelta> {
if !is_newline(text) {
return None;
}
let mut iter = DeltaIter::from_offset(delta, index);
match iter.next_op_with_newline() {
None => {}
Some((newline_op, offset)) => {
let newline_attributes = newline_op.get_attributes();
let block_attributes = attributes_except_header(&newline_op);
if block_attributes.is_empty() {
return None;
}
let mut reset_attribute = RichTextAttributes::new();
if newline_attributes.contains_key(&RichTextAttributeKey::Header) {
reset_attribute.add(RichTextAttribute::Header(1));
}
let lines: Vec<_> = text.split(NEW_LINE).collect();
let mut new_delta = DeltaBuilder::new().retain(index + replace_len).build();
lines.iter().enumerate().for_each(|(i, line)| {
if !line.is_empty() {
new_delta.insert(line, plain_attributes());
}
if i == 0 {
new_delta.insert(NEW_LINE, newline_attributes.clone());
} else if i < lines.len() - 1 {
new_delta.insert(NEW_LINE, block_attributes.clone());
} else {
// do nothing
}
});
if !reset_attribute.is_empty() {
new_delta.retain(offset, plain_attributes());
let len = newline_op.get_data().find(NEW_LINE).unwrap();
new_delta.retain(len, plain_attributes());
new_delta.retain(1, reset_attribute);
}
return Some(new_delta);
}
}
None
}
}

@@ -0,0 +1,97 @@
use crate::{
client_document::InsertExt,
util::{contain_newline, is_newline},
};
use lib_ot::{
core::{DeltaBuilder, DeltaIter, OpNewline, NEW_LINE},
rich_text::{plain_attributes, RichTextAttributeKey, RichTextDelta},
};
pub struct PreserveInlineFormat {}
impl InsertExt for PreserveInlineFormat {
fn ext_name(&self) -> &str {
"PreserveInlineFormat"
}
fn apply(&self, delta: &RichTextDelta, replace_len: usize, text: &str, index: usize) -> Option<RichTextDelta> {
if contain_newline(text) {
return None;
}
let mut iter = DeltaIter::new(delta);
let prev = iter.next_op_with_len(index)?;
if OpNewline::parse(&prev).is_contain() {
return None;
}
let mut attributes = prev.get_attributes();
if attributes.is_empty() || !attributes.contains_key(&RichTextAttributeKey::Link) {
return Some(
DeltaBuilder::new()
.retain(index + replace_len)
.insert_with_attributes(text, attributes)
.build(),
);
}
let next = iter.next_op();
match &next {
None => attributes = plain_attributes(),
Some(next) => {
if OpNewline::parse(next).is_equal() {
attributes = plain_attributes();
}
}
}
let new_delta = DeltaBuilder::new()
.retain(index + replace_len)
.insert_with_attributes(text, attributes)
.build();
Some(new_delta)
}
}
pub struct PreserveLineFormatOnSplit {}
impl InsertExt for PreserveLineFormatOnSplit {
fn ext_name(&self) -> &str {
"PreserveLineFormatOnSplit"
}
fn apply(&self, delta: &RichTextDelta, replace_len: usize, text: &str, index: usize) -> Option<RichTextDelta> {
if !is_newline(text) {
return None;
}
let mut iter = DeltaIter::new(delta);
let prev = iter.next_op_with_len(index)?;
if OpNewline::parse(&prev).is_end() {
return None;
}
let next = iter.next_op()?;
let newline_status = OpNewline::parse(&next);
if newline_status.is_end() {
return None;
}
let mut new_delta = RichTextDelta::new();
new_delta.retain(index + replace_len, plain_attributes());
if newline_status.is_contain() {
debug_assert!(!next.has_attribute());
new_delta.insert(NEW_LINE, plain_attributes());
return Some(new_delta);
}
match iter.next_op_with_newline() {
None => {}
Some((newline_op, _)) => {
new_delta.insert(NEW_LINE, newline_op.get_attributes());
}
}
Some(new_delta)
}
}

@@ -0,0 +1,40 @@
use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::{
core::{DeltaBuilder, DeltaIter, Utf16CodeUnitMetric, NEW_LINE},
rich_text::{RichTextAttributeKey, RichTextAttributes, RichTextDelta},
};
pub struct ResetLineFormatOnNewLine {}
impl InsertExt for ResetLineFormatOnNewLine {
fn ext_name(&self) -> &str {
"ResetLineFormatOnNewLine"
}
fn apply(&self, delta: &RichTextDelta, replace_len: usize, text: &str, index: usize) -> Option<RichTextDelta> {
if !is_newline(text) {
return None;
}
let mut iter = DeltaIter::new(delta);
iter.seek::<Utf16CodeUnitMetric>(index);
let next_op = iter.next_op()?;
if !next_op.get_data().starts_with(NEW_LINE) {
return None;
}
let mut reset_attribute = RichTextAttributes::new();
if next_op.get_attributes().contains_key(&RichTextAttributeKey::Header) {
reset_attribute.delete(&RichTextAttributeKey::Header);
}
let len = index + replace_len;
Some(
DeltaBuilder::new()
.retain(len)
.insert_with_attributes(NEW_LINE, next_op.get_attributes())
.retain_with_attributes(1, reset_attribute)
.trim()
.build(),
)
}
}

@@ -0,0 +1,31 @@
pub use delete::*;
pub use format::*;
pub use insert::*;
use lib_ot::{
core::Interval,
rich_text::{RichTextAttribute, RichTextDelta},
};
mod delete;
mod format;
mod helper;
mod insert;
pub type InsertExtension = Box<dyn InsertExt + Send + Sync>;
pub type FormatExtension = Box<dyn FormatExt + Send + Sync>;
pub type DeleteExtension = Box<dyn DeleteExt + Send + Sync>;
pub trait InsertExt {
fn ext_name(&self) -> &str;
fn apply(&self, delta: &RichTextDelta, replace_len: usize, text: &str, index: usize) -> Option<RichTextDelta>;
}
pub trait FormatExt {
fn ext_name(&self) -> &str;
fn apply(&self, delta: &RichTextDelta, interval: Interval, attribute: &RichTextAttribute) -> Option<RichTextDelta>;
}
pub trait DeleteExt {
fn ext_name(&self) -> &str;
fn apply(&self, delta: &RichTextDelta, interval: Interval) -> Option<RichTextDelta>;
}
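A sketch of a custom InsertExt implementing the trait above; the extension name and its pass-through behavior are illustrative, mirroring InsertEmbedsExt further up in this diff.

pub struct NoopInsert {}
impl InsertExt for NoopInsert {
    fn ext_name(&self) -> &str {
        "NoopInsert"
    }
    // Returning None hands the edit to the next extension registered in
    // ViewExtensions (see the construct_insert_exts list later in this commit).
    fn apply(&self, _delta: &RichTextDelta, _replace_len: usize, _text: &str, _index: usize) -> Option<RichTextDelta> {
        None
    }
}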

@@ -0,0 +1,80 @@
use lib_ot::rich_text::RichTextDelta;
const MAX_UNDOES: usize = 20;
#[derive(Debug, Clone)]
pub struct UndoResult {
pub delta: RichTextDelta,
}
#[derive(Debug, Clone)]
pub struct History {
#[allow(dead_code)]
cur_undo: usize,
undoes: Vec<RichTextDelta>,
redoes: Vec<RichTextDelta>,
capacity: usize,
}
impl std::default::Default for History {
fn default() -> Self {
History {
cur_undo: 1,
undoes: Vec::new(),
redoes: Vec::new(),
capacity: MAX_UNDOES,
}
}
}
impl History {
pub fn new() -> Self {
History::default()
}
pub fn can_undo(&self) -> bool {
!self.undoes.is_empty()
}
pub fn can_redo(&self) -> bool {
!self.redoes.is_empty()
}
pub fn add_undo(&mut self, delta: RichTextDelta) {
self.undoes.push(delta);
}
pub fn add_redo(&mut self, delta: RichTextDelta) {
self.redoes.push(delta);
}
pub fn record(&mut self, delta: RichTextDelta) {
if delta.ops.is_empty() {
return;
}
self.redoes.clear();
self.add_undo(delta);
if self.undoes.len() > self.capacity {
self.undoes.remove(0);
}
}
pub fn undo(&mut self) -> Option<RichTextDelta> {
if !self.can_undo() {
return None;
}
let delta = self.undoes.pop().unwrap();
Some(delta)
}
pub fn redo(&mut self) -> Option<RichTextDelta> {
if !self.can_redo() {
return None;
}
let delta = self.redoes.pop().unwrap();
Some(delta)
}
}
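A small sketch of the undo/redo bookkeeping above, assuming the DeltaBuilder usage shown earlier in this diff; the recorded delta is illustrative.

fn history_sketch() {
    use lib_ot::core::DeltaBuilder;
    let mut history = History::new();
    // Record a non-empty delta; empty deltas are ignored by record().
    let change: RichTextDelta = DeltaBuilder::new().insert("abc").build();
    history.record(change);
    assert!(history.can_undo());
    if let Some(undo_delta) = history.undo() {
        // In ClientDocument::undo the popped delta is applied, inverted, and the
        // inverse is pushed as the redo entry; pushing it back directly here only
        // illustrates the two stacks.
        history.add_redo(undo_delta);
    }
    assert!(history.can_redo());
}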

@@ -0,0 +1,12 @@
#![allow(clippy::module_inception)]
pub use document_pad::*;
pub(crate) use extensions::*;
pub use view::*;
mod data;
pub mod default;
mod document_pad;
mod extensions;
pub mod history;
mod view;

@@ -0,0 +1,111 @@
use crate::client_document::*;
use lib_ot::{
core::{trim, Interval},
errors::{ErrorBuilder, OTError, OTErrorCode},
rich_text::{RichTextAttribute, RichTextDelta},
};
pub const RECORD_THRESHOLD: usize = 400; // in milliseconds
pub struct ViewExtensions {
insert_exts: Vec<InsertExtension>,
format_exts: Vec<FormatExtension>,
delete_exts: Vec<DeleteExtension>,
}
impl ViewExtensions {
pub(crate) fn new() -> Self {
Self {
insert_exts: construct_insert_exts(),
format_exts: construct_format_exts(),
delete_exts: construct_delete_exts(),
}
}
pub(crate) fn insert(
&self,
delta: &RichTextDelta,
text: &str,
interval: Interval,
) -> Result<RichTextDelta, OTError> {
let mut new_delta = None;
for ext in &self.insert_exts {
if let Some(mut delta) = ext.apply(delta, interval.size(), text, interval.start) {
trim(&mut delta);
tracing::debug!("[{} extension]: process: {}", ext.ext_name(), delta);
new_delta = Some(delta);
break;
}
}
match new_delta {
None => Err(ErrorBuilder::new(OTErrorCode::ApplyInsertFail).build()),
Some(new_delta) => Ok(new_delta),
}
}
pub(crate) fn delete(&self, delta: &RichTextDelta, interval: Interval) -> Result<RichTextDelta, OTError> {
let mut new_delta = None;
for ext in &self.delete_exts {
if let Some(mut delta) = ext.apply(delta, interval) {
trim(&mut delta);
tracing::trace!("[{}]: applied, delta: {}", ext.ext_name(), delta);
new_delta = Some(delta);
break;
}
}
match new_delta {
None => Err(ErrorBuilder::new(OTErrorCode::ApplyDeleteFail).build()),
Some(new_delta) => Ok(new_delta),
}
}
pub(crate) fn format(
&self,
delta: &RichTextDelta,
attribute: RichTextAttribute,
interval: Interval,
) -> Result<RichTextDelta, OTError> {
let mut new_delta = None;
for ext in &self.format_exts {
if let Some(mut delta) = ext.apply(delta, interval, &attribute) {
trim(&mut delta);
tracing::trace!("[{}]: applied, delta: {}", ext.ext_name(), delta);
new_delta = Some(delta);
break;
}
}
match new_delta {
None => Err(ErrorBuilder::new(OTErrorCode::ApplyFormatFail).build()),
Some(new_delta) => Ok(new_delta),
}
}
}
fn construct_insert_exts() -> Vec<InsertExtension> {
vec![
Box::new(InsertEmbedsExt {}),
Box::new(ForceNewlineForInsertsAroundEmbedExt {}),
Box::new(AutoExitBlock {}),
Box::new(PreserveBlockFormatOnInsert {}),
Box::new(PreserveLineFormatOnSplit {}),
Box::new(ResetLineFormatOnNewLine {}),
Box::new(AutoFormatExt {}),
Box::new(PreserveInlineFormat {}),
Box::new(DefaultInsertAttribute {}),
]
}
fn construct_format_exts() -> Vec<FormatExtension> {
vec![
// Box::new(FormatLinkAtCaretPositionExt {}),
Box::new(ResolveBlockFormat {}),
Box::new(ResolveInlineFormat {}),
]
}
fn construct_delete_exts() -> Vec<DeleteExtension> {
vec![Box::new(PreserveLineFormatOnMerge {}), Box::new(DefaultDelete {})]
}

@@ -0,0 +1,64 @@
use crate::entities::folder_info::FolderDelta;
use crate::util::make_delta_from_revisions;
use crate::{
client_folder::{default_folder_delta, FolderPad},
entities::revision::Revision,
errors::{CollaborateError, CollaborateResult},
};
use flowy_folder_data_model::entities::{trash::Trash, workspace::Workspace};
use lib_ot::core::{PlainTextAttributes, PlainTextDelta, PlainTextDeltaBuilder};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
#[derive(Serialize, Deserialize)]
pub(crate) struct FolderPadBuilder {
workspaces: Vec<Arc<Workspace>>,
trash: Vec<Arc<Trash>>,
}
impl FolderPadBuilder {
pub(crate) fn new() -> Self {
Self {
workspaces: vec![],
trash: vec![],
}
}
pub(crate) fn with_workspace(mut self, workspaces: Vec<Workspace>) -> Self {
self.workspaces = workspaces.into_iter().map(Arc::new).collect::<Vec<_>>();
self
}
pub(crate) fn with_trash(mut self, trash: Vec<Trash>) -> Self {
self.trash = trash.into_iter().map(Arc::new).collect::<Vec<_>>();
self
}
pub(crate) fn build_with_delta(self, mut delta: PlainTextDelta) -> CollaborateResult<FolderPad> {
if delta.is_empty() {
delta = default_folder_delta();
}
// TODO: Reconvert from history if delta.to_str() fails.
let folder_json = delta.to_str()?;
let mut folder: FolderPad = serde_json::from_str(&folder_json)
.map_err(|e| CollaborateError::internal().context(format!("Deserialize delta to folder failed: {}", e)))?;
folder.delta = delta;
Ok(folder)
}
pub(crate) fn build_with_revisions(self, revisions: Vec<Revision>) -> CollaborateResult<FolderPad> {
let folder_delta: FolderDelta = make_delta_from_revisions::<PlainTextAttributes>(revisions)?;
self.build_with_delta(folder_delta)
}
pub(crate) fn build(self) -> CollaborateResult<FolderPad> {
let json = serde_json::to_string(&self)
.map_err(|e| CollaborateError::internal().context(format!("Serialize to folder json str failed: {}", e)))?;
Ok(FolderPad {
workspaces: self.workspaces,
trash: self.trash,
delta: PlainTextDeltaBuilder::new().insert(&json).build(),
})
}
}

@@ -0,0 +1,796 @@
use crate::util::cal_diff;
use crate::{
client_folder::builder::FolderPadBuilder,
entities::{
folder_info::FolderDelta,
revision::{md5, Revision},
},
errors::{CollaborateError, CollaborateResult},
};
use flowy_folder_data_model::entities::{app::App, trash::Trash, view::View, workspace::Workspace};
use lib_ot::core::*;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq)]
pub struct FolderPad {
pub(crate) workspaces: Vec<Arc<Workspace>>,
pub(crate) trash: Vec<Arc<Trash>>,
#[serde(skip)]
pub(crate) delta: FolderDelta,
}
impl FolderPad {
pub fn new(workspaces: Vec<Workspace>, trash: Vec<Trash>) -> CollaborateResult<Self> {
FolderPadBuilder::new()
.with_workspace(workspaces)
.with_trash(trash)
.build()
}
pub fn from_revisions(revisions: Vec<Revision>) -> CollaborateResult<Self> {
FolderPadBuilder::new().build_with_revisions(revisions)
}
pub fn from_delta(delta: FolderDelta) -> CollaborateResult<Self> {
FolderPadBuilder::new().build_with_delta(delta)
}
pub fn delta(&self) -> &FolderDelta {
&self.delta
}
pub fn reset_folder(&mut self, delta: FolderDelta) -> CollaborateResult<String> {
let folder = FolderPad::from_delta(delta)?;
self.workspaces = folder.workspaces;
self.trash = folder.trash;
self.delta = folder.delta;
Ok(self.md5())
}
pub fn compose_remote_delta(&mut self, delta: FolderDelta) -> CollaborateResult<String> {
let composed_delta = self.delta.compose(&delta)?;
self.reset_folder(composed_delta)
}
pub fn is_empty(&self) -> bool {
self.workspaces.is_empty() && self.trash.is_empty()
}
#[tracing::instrument(level = "trace", skip(self, workspace), fields(workspace_name=%workspace.name), err)]
pub fn create_workspace(&mut self, workspace: Workspace) -> CollaborateResult<Option<FolderChange>> {
let workspace = Arc::new(workspace);
if self.workspaces.contains(&workspace) {
tracing::warn!("[RootFolder]: Duplicate workspace");
return Ok(None);
}
self.modify_workspaces(move |workspaces| {
workspaces.push(workspace);
Ok(Some(()))
})
}
pub fn update_workspace(
&mut self,
workspace_id: &str,
name: Option<String>,
desc: Option<String>,
) -> CollaborateResult<Option<FolderChange>> {
self.with_workspace(workspace_id, |workspace| {
if let Some(name) = name {
workspace.name = name;
}
if let Some(desc) = desc {
workspace.desc = desc;
}
Ok(Some(()))
})
}
pub fn read_workspaces(&self, workspace_id: Option<String>) -> CollaborateResult<Vec<Workspace>> {
match workspace_id {
None => {
let workspaces = self
.workspaces
.iter()
.map(|workspace| workspace.as_ref().clone())
.collect::<Vec<Workspace>>();
Ok(workspaces)
}
Some(workspace_id) => {
if let Some(workspace) = self.workspaces.iter().find(|workspace| workspace.id == workspace_id) {
Ok(vec![workspace.as_ref().clone()])
} else {
Err(CollaborateError::record_not_found()
.context(format!("Can't find workspace with id {}", workspace_id)))
}
}
}
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn delete_workspace(&mut self, workspace_id: &str) -> CollaborateResult<Option<FolderChange>> {
self.modify_workspaces(|workspaces| {
workspaces.retain(|w| w.id != workspace_id);
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip(self), fields(app_name=%app.name), err)]
pub fn create_app(&mut self, app: App) -> CollaborateResult<Option<FolderChange>> {
let workspace_id = app.workspace_id.clone();
self.with_workspace(&workspace_id, move |workspace| {
if workspace.apps.contains(&app) {
tracing::warn!("[RootFolder]: Duplicate app");
return Ok(None);
}
workspace.apps.push(app);
Ok(Some(()))
})
}
pub fn read_app(&self, app_id: &str) -> CollaborateResult<App> {
for workspace in &self.workspaces {
if let Some(app) = workspace.apps.iter().find(|app| app.id == app_id) {
return Ok(app.clone());
}
}
Err(CollaborateError::record_not_found().context(format!("Can't find app with id {}", app_id)))
}
pub fn update_app(
&mut self,
app_id: &str,
name: Option<String>,
desc: Option<String>,
) -> CollaborateResult<Option<FolderChange>> {
self.with_app(app_id, move |app| {
if let Some(name) = name {
app.name = name;
}
if let Some(desc) = desc {
app.desc = desc;
}
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn delete_app(&mut self, app_id: &str) -> CollaborateResult<Option<FolderChange>> {
let app = self.read_app(app_id)?;
self.with_workspace(&app.workspace_id, |workspace| {
workspace.apps.retain(|app| app.id != app_id);
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip(self), fields(view_name=%view.name), err)]
pub fn create_view(&mut self, view: View) -> CollaborateResult<Option<FolderChange>> {
let app_id = view.belong_to_id.clone();
self.with_app(&app_id, move |app| {
if app.belongings.contains(&view) {
tracing::warn!("[RootFolder]: Duplicate view");
return Ok(None);
}
app.belongings.push(view);
Ok(Some(()))
})
}
pub fn read_view(&self, view_id: &str) -> CollaborateResult<View> {
for workspace in &self.workspaces {
for app in &(*workspace.apps) {
if let Some(view) = app.belongings.iter().find(|b| b.id == view_id) {
return Ok(view.clone());
}
}
}
Err(CollaborateError::record_not_found().context(format!("Can't find view with id {}", view_id)))
}
pub fn read_views(&self, belong_to_id: &str) -> CollaborateResult<Vec<View>> {
for workspace in &self.workspaces {
for app in &(*workspace.apps) {
if app.id == belong_to_id {
return Ok(app.belongings.clone().take_items());
}
}
}
Ok(vec![])
}
pub fn update_view(
&mut self,
view_id: &str,
name: Option<String>,
desc: Option<String>,
modified_time: i64,
) -> CollaborateResult<Option<FolderChange>> {
let view = self.read_view(view_id)?;
self.with_view(&view.belong_to_id, view_id, |view| {
if let Some(name) = name {
view.name = name;
}
if let Some(desc) = desc {
view.desc = desc;
}
view.modified_time = modified_time;
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn delete_view(&mut self, view_id: &str) -> CollaborateResult<Option<FolderChange>> {
let view = self.read_view(view_id)?;
self.with_app(&view.belong_to_id, |app| {
app.belongings.retain(|view| view.id != view_id);
Ok(Some(()))
})
}
pub fn create_trash(&mut self, trash: Vec<Trash>) -> CollaborateResult<Option<FolderChange>> {
self.with_trash(|t| {
let mut new_trash = trash.into_iter().map(Arc::new).collect::<Vec<Arc<Trash>>>();
t.append(&mut new_trash);
Ok(Some(()))
})
}
pub fn read_trash(&self, trash_id: Option<String>) -> CollaborateResult<Vec<Trash>> {
match trash_id {
None => Ok(self.trash.iter().map(|t| t.as_ref().clone()).collect::<Vec<Trash>>()),
Some(trash_id) => match self.trash.iter().find(|t| t.id == trash_id) {
Some(trash) => Ok(vec![trash.as_ref().clone()]),
None => Ok(vec![]),
},
}
}
pub fn delete_trash(&mut self, trash_ids: Option<Vec<String>>) -> CollaborateResult<Option<FolderChange>> {
match trash_ids {
None => self.with_trash(|trash| {
trash.clear();
Ok(Some(()))
}),
Some(trash_ids) => self.with_trash(|trash| {
trash.retain(|t| !trash_ids.contains(&t.id));
Ok(Some(()))
}),
}
}
pub fn md5(&self) -> String {
md5(&self.delta.to_delta_bytes())
}
pub fn to_json(&self) -> CollaborateResult<String> {
serde_json::to_string(self)
.map_err(|e| CollaborateError::internal().context(format!("Serialize folder to json failed: {}", e)))
}
}
impl FolderPad {
fn modify_workspaces<F>(&mut self, f: F) -> CollaborateResult<Option<FolderChange>>
where
F: FnOnce(&mut Vec<Arc<Workspace>>) -> CollaborateResult<Option<()>>,
{
let cloned_self = self.clone();
match f(&mut self.workspaces)? {
None => Ok(None),
Some(_) => {
let old = cloned_self.to_json()?;
let new = self.to_json()?;
match cal_diff::<PlainTextAttributes>(old, new) {
None => Ok(None),
Some(delta) => {
self.delta = self.delta.compose(&delta)?;
Ok(Some(FolderChange { delta, md5: self.md5() }))
}
}
}
}
}
fn with_workspace<F>(&mut self, workspace_id: &str, f: F) -> CollaborateResult<Option<FolderChange>>
where
F: FnOnce(&mut Workspace) -> CollaborateResult<Option<()>>,
{
self.modify_workspaces(|workspaces| {
if let Some(workspace) = workspaces.iter_mut().find(|workspace| workspace_id == workspace.id) {
f(Arc::make_mut(workspace))
} else {
tracing::warn!("[FolderPad]: Can't find any workspace with id: {}", workspace_id);
Ok(None)
}
})
}
fn with_trash<F>(&mut self, f: F) -> CollaborateResult<Option<FolderChange>>
where
F: FnOnce(&mut Vec<Arc<Trash>>) -> CollaborateResult<Option<()>>,
{
let cloned_self = self.clone();
match f(&mut self.trash)? {
None => Ok(None),
Some(_) => {
let old = cloned_self.to_json()?;
let new = self.to_json()?;
match cal_diff::<PlainTextAttributes>(old, new) {
None => Ok(None),
Some(delta) => {
self.delta = self.delta.compose(&delta)?;
Ok(Some(FolderChange { delta, md5: self.md5() }))
}
}
}
}
}
fn with_app<F>(&mut self, app_id: &str, f: F) -> CollaborateResult<Option<FolderChange>>
where
F: FnOnce(&mut App) -> CollaborateResult<Option<()>>,
{
let workspace_id = match self
.workspaces
.iter()
.find(|workspace| workspace.apps.iter().any(|app| app.id == app_id))
{
None => {
tracing::warn!("[FolderPad]: Can't find any app with id: {}", app_id);
return Ok(None);
}
Some(workspace) => workspace.id.clone(),
};
self.with_workspace(&workspace_id, |workspace| {
// It's ok to unwrap because we get the workspace from the app_id.
f(workspace.apps.iter_mut().find(|app| app_id == app.id).unwrap())
})
}
fn with_view<F>(&mut self, belong_to_id: &str, view_id: &str, f: F) -> CollaborateResult<Option<FolderChange>>
where
F: FnOnce(&mut View) -> CollaborateResult<Option<()>>,
{
self.with_app(belong_to_id, |app| {
match app.belongings.iter_mut().find(|view| view_id == view.id) {
None => {
tracing::warn!("[FolderPad]: Can't find any view with id: {}", view_id);
Ok(None)
}
Some(view) => f(view),
}
})
}
}
pub fn default_folder_delta() -> FolderDelta {
PlainTextDeltaBuilder::new()
.insert(r#"{"workspaces":[],"trash":[]}"#)
.build()
}
pub fn initial_folder_delta(folder_pad: &FolderPad) -> CollaborateResult<FolderDelta> {
let json = folder_pad.to_json()?;
let delta = PlainTextDeltaBuilder::new().insert(&json).build();
Ok(delta)
}
impl std::default::Default for FolderPad {
fn default() -> Self {
FolderPad {
workspaces: vec![],
trash: vec![],
delta: default_folder_delta(),
}
}
}
pub struct FolderChange {
pub delta: FolderDelta,
/// md5: the md5 of the FolderPad's delta after applying the change.
pub md5: String,
}
#[cfg(test)]
mod tests {
#![allow(clippy::all)]
use crate::{client_folder::folder_pad::FolderPad, entities::folder_info::FolderDelta};
use chrono::Utc;
use flowy_folder_data_model::entities::{app::App, trash::Trash, view::View, workspace::Workspace};
use lib_ot::core::{OperationTransformable, PlainTextDelta, PlainTextDeltaBuilder};
#[test]
fn folder_add_workspace() {
let (mut folder, initial_delta, _) = test_folder();
let _time = Utc::now();
let mut workspace_1 = Workspace::default();
workspace_1.name = "My first workspace".to_owned();
let delta_1 = folder.create_workspace(workspace_1).unwrap().unwrap().delta;
let mut workspace_2 = Workspace::default();
workspace_2.name = "My second workspace".to_owned();
let delta_2 = folder.create_workspace(workspace_2).unwrap().unwrap().delta;
let folder_from_delta = make_folder_from_delta(initial_delta, vec![delta_1, delta_2]);
assert_eq!(folder, folder_from_delta);
}
#[test]
fn folder_update_workspace() {
let (mut folder, initial_delta, workspace) = test_folder();
assert_folder_equal(
&folder,
&make_folder_from_delta(initial_delta.clone(), vec![]),
r#"{"workspaces":[{"id":"1","name":"😁 my first workspace","desc":"","apps":[],"modified_time":0,"create_time":0}],"trash":[]}"#,
);
let delta = folder
.update_workspace(&workspace.id, Some("☺️ rename workspace".to_string()), None)
.unwrap()
.unwrap()
.delta;
let folder_from_delta = make_folder_from_delta(initial_delta, vec![delta]);
assert_folder_equal(
&folder,
&folder_from_delta,
r#"{"workspaces":[{"id":"1","name":"☺️ rename workspace","desc":"","apps":[],"modified_time":0,"create_time":0}],"trash":[]}"#,
);
}
#[test]
fn folder_add_app() {
let (folder, initial_delta, _app) = test_app_folder();
let folder_from_delta = make_folder_from_delta(initial_delta, vec![]);
assert_eq!(folder, folder_from_delta);
assert_folder_equal(
&folder,
&folder_from_delta,
r#"{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [
{
"id": "",
"workspace_id": "1",
"name": "😁 my first app",
"desc": "",
"belongings": [],
"version": 0,
"modified_time": 0,
"create_time": 0
}
],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_update_app() {
let (mut folder, initial_delta, app) = test_app_folder();
let delta = folder
.update_app(&app.id, Some("🤪 rename app".to_owned()), None)
.unwrap()
.unwrap()
.delta;
let new_folder = make_folder_from_delta(initial_delta, vec![delta]);
assert_folder_equal(
&folder,
&new_folder,
r#"{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [
{
"id": "",
"workspace_id": "1",
"name": "🤪 rename app",
"desc": "",
"belongings": [],
"version": 0,
"modified_time": 0,
"create_time": 0
}
],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_delete_app() {
let (mut folder, initial_delta, app) = test_app_folder();
let delta = folder.delete_app(&app.id).unwrap().unwrap().delta;
let new_folder = make_folder_from_delta(initial_delta, vec![delta]);
assert_folder_equal(
&folder,
&new_folder,
r#"{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_add_view() {
let (folder, initial_delta, _view) = test_view_folder();
assert_folder_equal(
&folder,
&make_folder_from_delta(initial_delta, vec![]),
r#"
{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [
{
"id": "",
"workspace_id": "1",
"name": "😁 my first app",
"desc": "",
"belongings": [
{
"id": "",
"belong_to_id": "",
"name": "🎃 my first view",
"desc": "",
"view_type": "Blank",
"version": 0,
"belongings": [],
"modified_time": 0,
"create_time": 0
}
],
"version": 0,
"modified_time": 0,
"create_time": 0
}
],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_update_view() {
let (mut folder, initial_delta, view) = test_view_folder();
let delta = folder
.update_view(&view.id, Some("😦 rename view".to_owned()), None, 123)
.unwrap()
.unwrap()
.delta;
let new_folder = make_folder_from_delta(initial_delta, vec![delta]);
assert_folder_equal(
&folder,
&new_folder,
r#"{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [
{
"id": "",
"workspace_id": "1",
"name": "😁 my first app",
"desc": "",
"belongings": [
{
"id": "",
"belong_to_id": "",
"name": "😦 rename view",
"desc": "",
"view_type": "Blank",
"version": 0,
"belongings": [],
"modified_time": 123,
"create_time": 0
}
],
"version": 0,
"modified_time": 0,
"create_time": 0
}
],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_delete_view() {
let (mut folder, initial_delta, view) = test_view_folder();
let delta = folder.delete_view(&view.id).unwrap().unwrap().delta;
let new_folder = make_folder_from_delta(initial_delta, vec![delta]);
assert_folder_equal(
&folder,
&new_folder,
r#"{
"workspaces": [
{
"id": "1",
"name": "😁 my first workspace",
"desc": "",
"apps": [
{
"id": "",
"workspace_id": "1",
"name": "😁 my first app",
"desc": "",
"belongings": [],
"version": 0,
"modified_time": 0,
"create_time": 0
}
],
"modified_time": 0,
"create_time": 0
}
],
"trash": []
}"#,
);
}
#[test]
fn folder_add_trash() {
let (folder, initial_delta, _trash) = test_trash();
assert_folder_equal(
&folder,
&make_folder_from_delta(initial_delta, vec![]),
r#"{
"workspaces": [],
"trash": [
{
"id": "1",
"name": "🚽 my first trash",
"modified_time": 0,
"create_time": 0,
"ty": "Unknown"
}
]
}
"#,
);
}
#[test]
fn folder_delete_trash() {
let (mut folder, initial_delta, trash) = test_trash();
let delta = folder.delete_trash(Some(vec![trash.id])).unwrap().unwrap().delta;
assert_folder_equal(
&folder,
&make_folder_from_delta(initial_delta, vec![delta]),
r#"{
"workspaces": [],
"trash": []
}
"#,
);
}
fn test_folder() -> (FolderPad, FolderDelta, Workspace) {
let mut folder = FolderPad::default();
let folder_json = serde_json::to_string(&folder).unwrap();
let mut delta = PlainTextDeltaBuilder::new().insert(&folder_json).build();
let mut workspace = Workspace::default();
workspace.name = "😁 my first workspace".to_owned();
workspace.id = "1".to_owned();
delta = delta
.compose(&folder.create_workspace(workspace.clone()).unwrap().unwrap().delta)
.unwrap();
(folder, delta, workspace)
}
fn test_app_folder() -> (FolderPad, FolderDelta, App) {
let (mut folder, mut initial_delta, workspace) = test_folder();
let mut app = App::default();
app.workspace_id = workspace.id;
app.name = "😁 my first app".to_owned();
initial_delta = initial_delta
.compose(&folder.create_app(app.clone()).unwrap().unwrap().delta)
.unwrap();
(folder, initial_delta, app)
}
fn test_view_folder() -> (FolderPad, FolderDelta, View) {
let (mut folder, mut initial_delta, app) = test_app_folder();
let mut view = View::default();
view.belong_to_id = app.id.clone();
view.name = "🎃 my first view".to_owned();
initial_delta = initial_delta
.compose(&folder.create_view(view.clone()).unwrap().unwrap().delta)
.unwrap();
(folder, initial_delta, view)
}
fn test_trash() -> (FolderPad, FolderDelta, Trash) {
let mut folder = FolderPad::default();
let folder_json = serde_json::to_string(&folder).unwrap();
let mut delta = PlainTextDeltaBuilder::new().insert(&folder_json).build();
let mut trash = Trash::default();
trash.name = "🚽 my first trash".to_owned();
trash.id = "1".to_owned();
delta = delta
.compose(&folder.create_trash(vec![trash.clone()]).unwrap().unwrap().delta)
.unwrap();
(folder, delta, trash)
}
fn make_folder_from_delta(mut initial_delta: FolderDelta, deltas: Vec<PlainTextDelta>) -> FolderPad {
for delta in deltas {
initial_delta = initial_delta.compose(&delta).unwrap();
}
FolderPad::from_delta(initial_delta).unwrap()
}
fn assert_folder_equal(old: &FolderPad, new: &FolderPad, expected: &str) {
assert_eq!(old, new);
let json1 = old.to_json().unwrap();
let json2 = new.to_json().unwrap();
let expect_folder: FolderPad = serde_json::from_str(expected).unwrap();
assert_eq!(json1, expect_folder.to_json().unwrap());
assert_eq!(json1, json2);
}
}

@@ -0,0 +1,4 @@
mod builder;
mod folder_pad;
pub use folder_pad::*;

@@ -0,0 +1,370 @@
use crate::entities::revision::{md5, RepeatedRevision, Revision};
use crate::errors::{CollaborateError, CollaborateResult};
use crate::util::{cal_diff, make_delta_from_revisions};
use flowy_grid_data_model::entities::{GridBlockMetaSerde, RowMeta, RowMetaChangeset};
use lib_infra::uuid;
use lib_ot::core::{OperationTransformable, PlainTextAttributes, PlainTextDelta, PlainTextDeltaBuilder};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
pub type GridBlockMetaDelta = PlainTextDelta;
pub type GridBlockMetaDeltaBuilder = PlainTextDeltaBuilder;
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct GridBlockMetaPad {
block_id: String,
row_metas: Vec<Arc<RowMeta>>,
#[serde(skip)]
pub(crate) delta: GridBlockMetaDelta,
}
impl GridBlockMetaPad {
pub fn from_delta(delta: GridBlockMetaDelta) -> CollaborateResult<Self> {
let s = delta.to_str()?;
tracing::info!("delta: {}", delta);
tracing::info!("{}", s);
let block_meta: GridBlockMetaSerde = serde_json::from_str(&s).map_err(|e| {
let msg = format!("Deserialize delta to block meta failed: {}", e);
CollaborateError::internal().context(msg)
})?;
let block_id = block_meta.block_id;
let rows = block_meta
.row_metas
.into_iter()
.map(Arc::new)
.collect::<Vec<Arc<RowMeta>>>();
Ok(Self {
block_id,
row_metas: rows,
delta,
})
}
pub fn from_revisions(_grid_id: &str, revisions: Vec<Revision>) -> CollaborateResult<Self> {
let block_delta: GridBlockMetaDelta = make_delta_from_revisions::<PlainTextAttributes>(revisions)?;
Self::from_delta(block_delta)
}
pub fn add_row(
&mut self,
row: RowMeta,
start_row_id: Option<String>,
) -> CollaborateResult<Option<GridBlockMetaChange>> {
self.modify(|rows| {
if let Some(upper_row_id) = start_row_id {
if upper_row_id.is_empty() {
rows.insert(0, Arc::new(row));
return Ok(Some(()));
}
if let Some(index) = rows.iter().position(|row| row.id == upper_row_id) {
rows.insert(index, Arc::new(row));
return Ok(Some(()));
}
}
rows.push(Arc::new(row));
Ok(Some(()))
})
}
pub fn delete_rows(&mut self, row_ids: &[String]) -> CollaborateResult<Option<GridBlockMetaChange>> {
self.modify(|rows| {
rows.retain(|row| !row_ids.contains(&row.id));
Ok(Some(()))
})
}
pub fn get_rows(&self, row_ids: Option<Vec<String>>) -> CollaborateResult<Vec<Arc<RowMeta>>> {
match row_ids {
None => Ok(self.row_metas.to_vec()),
Some(row_ids) => {
let row_map = self
.row_metas
.iter()
.map(|row| (&row.id, row.clone()))
.collect::<HashMap<&String, Arc<RowMeta>>>();
Ok(row_ids
.iter()
.flat_map(|row_id| match row_map.get(row_id) {
None => {
tracing::error!("Can't find the row with id: {}", row_id);
None
}
Some(row) => Some(row.clone()),
})
.collect::<Vec<_>>())
}
}
}
pub fn number_of_rows(&self) -> i32 {
self.row_metas.len() as i32
}
pub fn update_row(&mut self, changeset: RowMetaChangeset) -> CollaborateResult<Option<GridBlockMetaChange>> {
let row_id = changeset.row_id.clone();
self.modify_row(&row_id, |row| {
let mut is_changed = None;
if let Some(height) = changeset.height {
row.height = height;
is_changed = Some(());
}
if let Some(visibility) = changeset.visibility {
row.visibility = visibility;
is_changed = Some(());
}
if !changeset.cell_by_field_id.is_empty() {
is_changed = Some(());
changeset.cell_by_field_id.into_iter().for_each(|(field_id, cell)| {
row.cell_by_field_id.insert(field_id, cell);
})
}
Ok(is_changed)
})
}
pub fn modify<F>(&mut self, f: F) -> CollaborateResult<Option<GridBlockMetaChange>>
where
F: for<'a> FnOnce(&'a mut Vec<Arc<RowMeta>>) -> CollaborateResult<Option<()>>,
{
let cloned_self = self.clone();
match f(&mut self.row_metas)? {
None => Ok(None),
Some(_) => {
let old = cloned_self.to_json()?;
let new = self.to_json()?;
match cal_diff::<PlainTextAttributes>(old, new) {
None => Ok(None),
Some(delta) => {
self.delta = self.delta.compose(&delta)?;
Ok(Some(GridBlockMetaChange { delta, md5: self.md5() }))
}
}
}
}
}
fn modify_row<F>(&mut self, row_id: &str, f: F) -> CollaborateResult<Option<GridBlockMetaChange>>
where
F: FnOnce(&mut RowMeta) -> CollaborateResult<Option<()>>,
{
self.modify(|rows| {
if let Some(row_meta) = rows.iter_mut().find(|row_meta| row_id == row_meta.id) {
f(Arc::make_mut(row_meta))
} else {
tracing::warn!("[BlockMetaPad]: Can't find any row with id: {}", row_id);
Ok(None)
}
})
}
pub fn to_json(&self) -> CollaborateResult<String> {
serde_json::to_string(self)
.map_err(|e| CollaborateError::internal().context(format!("Serialize block meta to json failed: {}", e)))
}
pub fn md5(&self) -> String {
md5(&self.delta.to_delta_bytes())
}
pub fn delta_str(&self) -> String {
self.delta.to_delta_str()
}
}
pub struct GridBlockMetaChange {
pub delta: GridBlockMetaDelta,
/// md5: the md5 of the block meta after applying the change.
pub md5: String,
}
pub fn make_block_meta_delta(grid_block_meta_data: &GridBlockMetaSerde) -> GridBlockMetaDelta {
let json = serde_json::to_string(&grid_block_meta_data).unwrap();
PlainTextDeltaBuilder::new().insert(&json).build()
}
pub fn make_block_meta_revisions(user_id: &str, grid_block_meta_data: &GridBlockMetaSerde) -> RepeatedRevision {
let delta = make_block_meta_delta(grid_block_meta_data);
let bytes = delta.to_delta_bytes();
let revision = Revision::initial_revision(user_id, &grid_block_meta_data.block_id, bytes);
revision.into()
}
impl std::default::Default for GridBlockMetaPad {
fn default() -> Self {
let block_meta_data = GridBlockMetaSerde {
block_id: uuid(),
row_metas: vec![],
};
let delta = make_block_meta_delta(&block_meta_data);
GridBlockMetaPad {
block_id: block_meta_data.block_id,
row_metas: block_meta_data.row_metas.into_iter().map(Arc::new).collect::<Vec<_>>(),
delta,
}
}
}
#[cfg(test)]
mod tests {
use crate::client_grid::{GridBlockMetaDelta, GridBlockMetaPad};
use flowy_grid_data_model::entities::{RowMeta, RowMetaChangeset};
#[test]
fn block_meta_add_row() {
let mut pad = test_pad();
let row = RowMeta {
id: "1".to_string(),
block_id: pad.block_id.clone(),
cell_by_field_id: Default::default(),
height: 0,
visibility: false,
};
let change = pad.add_row(row, None).unwrap().unwrap();
assert_eq!(
change.delta.to_delta_str(),
r#"[{"retain":29},{"insert":"{\"id\":\"1\",\"block_id\":\"1\",\"cell_by_field_id\":{},\"height\":0,\"visibility\":false}"},{"retain":2}]"#
);
}
#[test]
fn block_meta_insert_row() {
let mut pad = test_pad();
let row_1 = test_row_meta("1", &pad);
let row_2 = test_row_meta("2", &pad);
let row_3 = test_row_meta("3", &pad);
let change = pad.add_row(row_1.clone(), None).unwrap().unwrap();
assert_eq!(
change.delta.to_delta_str(),
r#"[{"retain":29},{"insert":"{\"id\":\"1\",\"block_id\":\"1\",\"cell_by_field_id\":{},\"height\":0,\"visibility\":false}"},{"retain":2}]"#
);
let change = pad.add_row(row_2.clone(), None).unwrap().unwrap();
assert_eq!(
change.delta.to_delta_str(),
r#"[{"retain":106},{"insert":",{\"id\":\"2\",\"block_id\":\"1\",\"cell_by_field_id\":{},\"height\":0,\"visibility\":false}"},{"retain":2}]"#
);
let change = pad.add_row(row_3.clone(), Some("2".to_string())).unwrap().unwrap();
assert_eq!(
change.delta.to_delta_str(),
r#"[{"retain":114},{"insert":"3\",\"block_id\":\"1\",\"cell_by_field_id\":{},\"height\":0,\"visibility\":false},{\"id\":\""},{"retain":72}]"#
);
assert_eq!(*pad.row_metas[0], row_1);
assert_eq!(*pad.row_metas[1], row_3);
assert_eq!(*pad.row_metas[2], row_2);
}
fn test_row_meta(id: &str, pad: &GridBlockMetaPad) -> RowMeta {
RowMeta {
id: id.to_string(),
block_id: pad.block_id.clone(),
cell_by_field_id: Default::default(),
height: 0,
visibility: false,
}
}
#[test]
fn block_meta_insert_row2() {
let mut pad = test_pad();
let row_1 = test_row_meta("1", &pad);
let row_2 = test_row_meta("2", &pad);
let row_3 = test_row_meta("3", &pad);
let _ = pad.add_row(row_1.clone(), None).unwrap().unwrap();
let _ = pad.add_row(row_2.clone(), None).unwrap().unwrap();
let _ = pad.add_row(row_3.clone(), Some("1".to_string())).unwrap().unwrap();
assert_eq!(*pad.row_metas[0], row_3);
assert_eq!(*pad.row_metas[1], row_1);
assert_eq!(*pad.row_metas[2], row_2);
}
#[test]
fn block_meta_insert_row3() {
let mut pad = test_pad();
let row_1 = test_row_meta("1", &pad);
let row_2 = test_row_meta("2", &pad);
let row_3 = test_row_meta("3", &pad);
let _ = pad.add_row(row_1.clone(), None).unwrap().unwrap();
let _ = pad.add_row(row_2.clone(), None).unwrap().unwrap();
let _ = pad.add_row(row_3.clone(), Some("".to_string())).unwrap().unwrap();
assert_eq!(*pad.row_metas[0], row_3);
assert_eq!(*pad.row_metas[1], row_1);
assert_eq!(*pad.row_metas[2], row_2);
}
#[test]
fn block_meta_delete_row() {
let mut pad = test_pad();
let pre_delta_str = pad.delta_str();
let row = RowMeta {
id: "1".to_string(),
block_id: pad.block_id.clone(),
cell_by_field_id: Default::default(),
height: 0,
visibility: false,
};
let _ = pad.add_row(row.clone(), None).unwrap().unwrap();
let change = pad.delete_rows(&[row.id]).unwrap().unwrap();
assert_eq!(
change.delta.to_delta_str(),
r#"[{"retain":29},{"delete":77},{"retain":2}]"#
);
assert_eq!(pad.delta_str(), pre_delta_str);
}
#[test]
fn block_meta_update_row() {
let mut pad = test_pad();
let row = RowMeta {
id: "1".to_string(),
block_id: pad.block_id.clone(),
cell_by_field_id: Default::default(),
height: 0,
visibility: false,
};
let changeset = RowMetaChangeset {
row_id: row.id.clone(),
height: Some(100),
visibility: Some(true),
cell_by_field_id: Default::default(),
};
let _ = pad.add_row(row, None).unwrap().unwrap();
let change = pad.update_row(changeset).unwrap().unwrap();
assert_eq!(
change.delta.to_delta_str(),
r#"[{"retain":85},{"insert":"10"},{"retain":15},{"insert":"tru"},{"delete":4},{"retain":4}]"#
);
assert_eq!(
pad.to_json().unwrap(),
r#"{"block_id":"1","row_metas":[{"id":"1","block_id":"1","cell_by_field_id":{},"height":100,"visibility":true}]}"#
);
}
fn test_pad() -> GridBlockMetaPad {
let delta =
GridBlockMetaDelta::from_delta_str(r#"[{"insert":"{\"block_id\":\"1\",\"row_metas\":[]}"}]"#).unwrap();
GridBlockMetaPad::from_delta(delta).unwrap()
}
}
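A minimal sketch of the add_row / update_row / rebuild flow that GridBlockMetaPad supports, assuming the module paths above; the helper name, ids, and JSON seed are invented for illustration:
use crate::client_grid::{GridBlockMetaDelta, GridBlockMetaPad};
use crate::errors::CollaborateResult;
use flowy_grid_data_model::entities::{RowMeta, RowMetaChangeset};

fn block_meta_pad_sketch() -> CollaborateResult<()> {
    // Seed an empty block, the same way test_pad() does above.
    let delta = GridBlockMetaDelta::from_delta_str(
        r#"[{"insert":"{\"block_id\":\"block_1\",\"row_metas\":[]}"}]"#,
    )?;
    let mut pad = GridBlockMetaPad::from_delta(delta)?;

    // Append a row; passing None means "push at the end of the block".
    let row = RowMeta {
        id: "row_1".to_string(),
        block_id: "block_1".to_string(),
        cell_by_field_id: Default::default(),
        height: 60,
        visibility: true,
    };
    let _change = pad.add_row(row, None)?;

    // Patch the same row; only the fields set in the changeset are applied.
    let changeset = RowMetaChangeset {
        row_id: "row_1".to_string(),
        height: Some(100),
        visibility: Some(false),
        cell_by_field_id: Default::default(),
    };
    let _change = pad.update_row(changeset)?;

    // Every change is composed back into pad.delta, so the current delta
    // string alone is enough to rebuild an identical pad.
    let rebuilt = GridBlockMetaPad::from_delta(GridBlockMetaDelta::from_delta_str(&pad.delta_str())?)?;
    assert_eq!(pad.to_json()?, rebuilt.to_json()?);
    Ok(())
}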

View File

@ -0,0 +1,69 @@
use crate::errors::{CollaborateError, CollaborateResult};
use flowy_grid_data_model::entities::{BuildGridContext, FieldMeta, RowMeta};
#[derive(Default)]
pub struct GridBuilder {
build_context: BuildGridContext,
}
impl GridBuilder {
pub fn add_field(mut self, field: FieldMeta) -> Self {
self.build_context.field_metas.push(field);
self
}
pub fn add_empty_row(mut self) -> Self {
let row = RowMeta::new(&self.build_context.block_metas.block_id);
self.build_context.block_meta_data.row_metas.push(row);
self.build_context.block_metas.row_count += 1;
self
}
pub fn build(self) -> BuildGridContext {
self.build_context
}
}
#[allow(dead_code)]
fn check_rows(fields: &[FieldMeta], rows: &[RowMeta]) -> CollaborateResult<()> {
let field_ids = fields.iter().map(|field| &field.id).collect::<Vec<&String>>();
for row in rows {
let cell_field_ids = row.cell_by_field_id.keys().collect::<Vec<&String>>();
if cell_field_ids != field_ids {
let msg = format!("{:?} contains invalid cells", row);
return Err(CollaborateError::internal().context(msg));
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use crate::client_grid::{make_block_meta_delta, make_grid_delta, GridBuilder};
use flowy_grid_data_model::entities::{FieldMeta, FieldType, GridBlockMetaSerde, GridMeta};
#[test]
fn create_default_grid_test() {
let grid_id = "1".to_owned();
let build_context = GridBuilder::default()
.add_field(FieldMeta::new("Name", "", FieldType::RichText))
.add_field(FieldMeta::new("Tags", "", FieldType::SingleSelect))
.add_empty_row()
.add_empty_row()
.add_empty_row()
.build();
let grid_meta = GridMeta {
grid_id,
fields: build_context.field_metas,
block_metas: vec![build_context.block_metas],
};
let grid_meta_delta = make_grid_delta(&grid_meta);
let _: GridMeta = serde_json::from_str(&grid_meta_delta.to_str().unwrap()).unwrap();
let grid_block_meta_delta = make_block_meta_delta(&build_context.block_meta_data);
let _: GridBlockMetaSerde = serde_json::from_str(&grid_block_meta_delta.to_str().unwrap()).unwrap();
}
}
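A sketch of how the builder output might be turned into the two initial revision streams (one for the grid meta, one for its first block), assuming the helpers exported above; the function name and arguments are invented:
use crate::client_grid::{make_block_meta_revisions, make_grid_revisions, GridBuilder};
use flowy_grid_data_model::entities::{FieldMeta, FieldType, GridMeta};

fn build_initial_grid_revisions(user_id: &str, grid_id: &str) {
    let context = GridBuilder::default()
        .add_field(FieldMeta::new("Name", "", FieldType::RichText))
        .add_empty_row()
        .build();

    // Each helper wraps the serialized meta into a rev_id = 0 revision.
    let _block_revisions = make_block_meta_revisions(user_id, &context.block_meta_data);

    let grid_meta = GridMeta {
        grid_id: grid_id.to_owned(),
        fields: context.field_metas,
        block_metas: vec![context.block_metas],
    };
    let _grid_revisions = make_grid_revisions(user_id, &grid_meta);
}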

View File

@ -0,0 +1,290 @@
use crate::entities::revision::{md5, RepeatedRevision, Revision};
use crate::errors::{internal_error, CollaborateError, CollaborateResult};
use crate::util::{cal_diff, make_delta_from_revisions};
use bytes::Bytes;
use flowy_grid_data_model::entities::{
FieldChangeset, FieldMeta, FieldOrder, GridBlockMeta, GridBlockMetaChangeset, GridMeta, RepeatedFieldOrder,
};
use lib_infra::uuid;
use lib_ot::core::{OperationTransformable, PlainTextAttributes, PlainTextDelta, PlainTextDeltaBuilder};
use std::collections::HashMap;
use std::sync::Arc;
pub type GridMetaDelta = PlainTextDelta;
pub type GridDeltaBuilder = PlainTextDeltaBuilder;
pub struct GridMetaPad {
pub(crate) grid_meta: Arc<GridMeta>,
pub(crate) delta: GridMetaDelta,
}
impl GridMetaPad {
pub fn from_delta(delta: GridMetaDelta) -> CollaborateResult<Self> {
let s = delta.to_str()?;
let grid: GridMeta = serde_json::from_str(&s)
.map_err(|e| CollaborateError::internal().context(format!("Deserialize delta to grid failed: {}", e)))?;
Ok(Self {
grid_meta: Arc::new(grid),
delta,
})
}
pub fn from_revisions(_grid_id: &str, revisions: Vec<Revision>) -> CollaborateResult<Self> {
let grid_delta: GridMetaDelta = make_delta_from_revisions::<PlainTextAttributes>(revisions)?;
Self::from_delta(grid_delta)
}
pub fn create_field(&mut self, field_meta: FieldMeta) -> CollaborateResult<Option<GridChangeset>> {
self.modify_grid(|grid| {
if grid.fields.contains(&field_meta) {
tracing::warn!("Duplicate grid field");
Ok(None)
} else {
grid.fields.push(field_meta);
Ok(Some(()))
}
})
}
pub fn delete_field(&mut self, field_id: &str) -> CollaborateResult<Option<GridChangeset>> {
self.modify_grid(|grid| match grid.fields.iter().position(|field| field.id == field_id) {
None => Ok(None),
Some(index) => {
grid.fields.remove(index);
Ok(Some(()))
}
})
}
pub fn contain_field(&self, field_id: &str) -> bool {
self.grid_meta.fields.iter().any(|field| field.id == field_id)
}
pub fn get_field(&self, field_id: &str) -> Option<&FieldMeta> {
self.grid_meta.fields.iter().find(|field| field.id == field_id)
}
pub fn get_field_orders(&self) -> Vec<FieldOrder> {
self.grid_meta.fields.iter().map(FieldOrder::from).collect()
}
pub fn get_field_metas(&self, field_orders: Option<RepeatedFieldOrder>) -> CollaborateResult<Vec<FieldMeta>> {
match field_orders {
None => Ok(self.grid_meta.fields.clone()),
Some(field_orders) => {
let field_by_field_id = self
.grid_meta
.fields
.iter()
.map(|field| (&field.id, field))
.collect::<HashMap<&String, &FieldMeta>>();
let fields = field_orders
.iter()
.flat_map(|field_order| match field_by_field_id.get(&field_order.field_id) {
None => {
tracing::error!("Can't find the field with id: {}", field_order.field_id);
None
}
Some(field) => Some((*field).clone()),
})
.collect::<Vec<FieldMeta>>();
Ok(fields)
}
}
}
pub fn update_field(&mut self, changeset: FieldChangeset) -> CollaborateResult<Option<GridChangeset>> {
let field_id = changeset.field_id.clone();
self.modify_field(&field_id, |field| {
let mut is_changed = None;
if let Some(name) = changeset.name {
field.name = name;
is_changed = Some(())
}
if let Some(desc) = changeset.desc {
field.desc = desc;
is_changed = Some(())
}
if let Some(field_type) = changeset.field_type {
field.field_type = field_type;
is_changed = Some(())
}
if let Some(frozen) = changeset.frozen {
field.frozen = frozen;
is_changed = Some(())
}
if let Some(visibility) = changeset.visibility {
field.visibility = visibility;
is_changed = Some(())
}
if let Some(width) = changeset.width {
field.width = width;
is_changed = Some(())
}
if let Some(type_options) = changeset.type_options {
field.type_options = type_options;
is_changed = Some(())
}
Ok(is_changed)
})
}
pub fn create_block(&mut self, block: GridBlockMeta) -> CollaborateResult<Option<GridChangeset>> {
self.modify_grid(|grid| {
if grid.block_metas.iter().any(|b| b.block_id == block.block_id) {
tracing::warn!("Duplicate grid block");
Ok(None)
} else {
match grid.block_metas.last() {
None => grid.block_metas.push(block),
Some(last_block) => {
if last_block.start_row_index > block.start_row_index
&& last_block.len() > block.start_row_index
{
let msg = "GridBlock's start_row_index should be greater than the last_block's start_row_index and its len".to_string();
return Err(CollaborateError::internal().context(msg))
}
grid.block_metas.push(block);
}
}
Ok(Some(()))
}
})
}
pub fn get_blocks(&self) -> Vec<GridBlockMeta> {
self.grid_meta.block_metas.clone()
}
pub fn update_block(&mut self, changeset: GridBlockMetaChangeset) -> CollaborateResult<Option<GridChangeset>> {
let block_id = changeset.block_id.clone();
self.modify_block(&block_id, |block| {
let mut is_changed = None;
if let Some(row_count) = changeset.row_count {
block.row_count = row_count;
is_changed = Some(());
}
if let Some(start_row_index) = changeset.start_row_index {
block.start_row_index = start_row_index;
is_changed = Some(());
}
Ok(is_changed)
})
}
pub fn md5(&self) -> String {
md5(&self.delta.to_delta_bytes())
}
pub fn delta_str(&self) -> String {
self.delta.to_delta_str()
}
pub fn delta_bytes(&self) -> Bytes {
self.delta.to_delta_bytes()
}
pub fn fields(&self) -> &[FieldMeta] {
&self.grid_meta.fields
}
fn modify_grid<F>(&mut self, f: F) -> CollaborateResult<Option<GridChangeset>>
where
F: FnOnce(&mut GridMeta) -> CollaborateResult<Option<()>>,
{
let cloned_grid = self.grid_meta.clone();
match f(Arc::make_mut(&mut self.grid_meta))? {
None => Ok(None),
Some(_) => {
let old = json_from_grid(&cloned_grid)?;
let new = json_from_grid(&self.grid_meta)?;
match cal_diff::<PlainTextAttributes>(old, new) {
None => Ok(None),
Some(delta) => {
self.delta = self.delta.compose(&delta)?;
Ok(Some(GridChangeset { delta, md5: self.md5() }))
}
}
}
}
}
pub fn modify_block<F>(&mut self, block_id: &str, f: F) -> CollaborateResult<Option<GridChangeset>>
where
F: FnOnce(&mut GridBlockMeta) -> CollaborateResult<Option<()>>,
{
self.modify_grid(
|grid| match grid.block_metas.iter().position(|block| block.block_id == block_id) {
None => {
tracing::warn!("[GridMetaPad]: Can't find any block with id: {}", block_id);
Ok(None)
}
Some(index) => f(&mut grid.block_metas[index]),
},
)
}
pub fn modify_field<F>(&mut self, field_id: &str, f: F) -> CollaborateResult<Option<GridChangeset>>
where
F: FnOnce(&mut FieldMeta) -> CollaborateResult<Option<()>>,
{
self.modify_grid(|grid| match grid.fields.iter().position(|field| field.id == field_id) {
None => {
tracing::warn!("[GridMetaPad]: Can't find any field with id: {}", field_id);
Ok(None)
}
Some(index) => f(&mut grid.fields[index]),
})
}
}
fn json_from_grid(grid: &Arc<GridMeta>) -> CollaborateResult<String> {
let json = serde_json::to_string(grid)
.map_err(|err| internal_error(format!("Serialize grid to json str failed. {:?}", err)))?;
Ok(json)
}
pub struct GridChangeset {
pub delta: GridMetaDelta,
/// md5: the md5 of the grid after applying the change.
pub md5: String,
}
pub fn make_grid_delta(grid_meta: &GridMeta) -> GridMetaDelta {
let json = serde_json::to_string(&grid_meta).unwrap();
PlainTextDeltaBuilder::new().insert(&json).build()
}
pub fn make_grid_revisions(user_id: &str, grid_meta: &GridMeta) -> RepeatedRevision {
let delta = make_grid_delta(grid_meta);
let bytes = delta.to_delta_bytes();
let revision = Revision::initial_revision(user_id, &grid_meta.grid_id, bytes);
revision.into()
}
impl std::default::Default for GridMetaPad {
fn default() -> Self {
let grid = GridMeta {
grid_id: uuid(),
fields: vec![],
block_metas: vec![],
};
let delta = make_grid_delta(&grid);
GridMetaPad {
grid_meta: Arc::new(grid),
delta,
}
}
}
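A minimal sketch of driving GridMetaPad's field API, assuming the types above; the helper name is invented:
use crate::client_grid::GridMetaPad;
use crate::errors::CollaborateResult;
use flowy_grid_data_model::entities::{FieldMeta, FieldType};

fn grid_meta_pad_sketch() -> CollaborateResult<()> {
    let mut pad = GridMetaPad::default();

    let field = FieldMeta::new("Name", "", FieldType::RichText);
    let field_id = field.id.clone();

    // create_field rejects duplicates by returning Ok(None) instead of an error.
    let _change = pad.create_field(field)?;
    assert!(pad.contain_field(&field_id));

    // Every successful mutation yields a GridChangeset whose delta has already
    // been composed into pad.delta, so delta_str()/md5() describe the latest state.
    let _change = pad.delete_field(&field_id)?;
    assert!(!pad.contain_field(&field_id));
    Ok(())
}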

View File

@ -0,0 +1,7 @@
mod grid_block_meta_pad;
mod grid_builder;
mod grid_meta_pad;
pub use grid_block_meta_pad::*;
pub use grid_builder::*;
pub use grid_meta_pad::*;

View File

@ -0,0 +1,19 @@
use flowy_derive::ProtoBuf;
use lib_ot::core::PlainTextDelta;
pub type FolderDelta = PlainTextDelta;
#[derive(ProtoBuf, Default, Debug, Clone, Eq, PartialEq)]
pub struct FolderInfo {
#[pb(index = 1)]
pub folder_id: String,
#[pb(index = 2)]
pub text: String,
#[pb(index = 3)]
pub rev_id: i64,
#[pb(index = 4)]
pub base_rev_id: i64,
}

View File

@ -0,0 +1,5 @@
pub mod folder_info;
pub mod parser;
pub mod revision;
pub mod text_block_info;
pub mod ws_data;

View File

@ -0,0 +1,18 @@
#[derive(Debug)]
pub struct DocumentIdentify(pub String);
impl DocumentIdentify {
pub fn parse(s: String) -> Result<DocumentIdentify, String> {
if s.trim().is_empty() {
return Err("Doc id can not be empty or whitespace".to_string());
}
Ok(Self(s))
}
}
impl AsRef<str> for DocumentIdentify {
fn as_ref(&self) -> &str {
&self.0
}
}
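A short sketch of the parser in use; the wrapper function is invented for illustration:
use crate::entities::parser::DocumentIdentify;

fn parse_doc_id(raw: String) -> Result<String, String> {
    // Rejects empty or whitespace-only ids; otherwise hands back the validated value.
    let doc_id = DocumentIdentify::parse(raw)?;
    Ok(doc_id.as_ref().to_owned())
}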

View File

@ -0,0 +1,3 @@
mod doc_id;
pub use doc_id::*;

View File

@ -0,0 +1,228 @@
use bytes::Bytes;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use lib_ot::rich_text::RichTextDelta;
use std::{convert::TryFrom, fmt::Formatter, ops::RangeInclusive};
#[derive(PartialEq, Eq, Clone, Default, ProtoBuf)]
pub struct Revision {
#[pb(index = 1)]
pub base_rev_id: i64,
#[pb(index = 2)]
pub rev_id: i64,
#[pb(index = 3)]
pub delta_data: Vec<u8>,
#[pb(index = 4)]
pub md5: String,
#[pb(index = 5)]
pub object_id: String,
#[pb(index = 6)]
ty: RevType, // Deprecated
#[pb(index = 7)]
pub user_id: String,
}
impl std::convert::From<Vec<u8>> for Revision {
fn from(data: Vec<u8>) -> Self {
let bytes = Bytes::from(data);
Revision::try_from(bytes).unwrap()
}
}
impl Revision {
pub fn is_empty(&self) -> bool {
self.base_rev_id == self.rev_id
}
pub fn pair_rev_id(&self) -> (i64, i64) {
(self.base_rev_id, self.rev_id)
}
pub fn is_initial(&self) -> bool {
self.rev_id == 0
}
pub fn initial_revision(user_id: &str, object_id: &str, delta_data: Bytes) -> Self {
let md5 = md5(&delta_data);
Self::new(object_id, 0, 0, delta_data, user_id, md5)
}
pub fn new(
object_id: &str,
base_rev_id: i64,
rev_id: i64,
delta_data: Bytes,
user_id: &str,
md5: String,
) -> Revision {
let user_id = user_id.to_owned();
let object_id = object_id.to_owned();
let delta_data = delta_data.to_vec();
if base_rev_id != 0 {
debug_assert!(base_rev_id != rev_id);
}
Self {
base_rev_id,
rev_id,
delta_data,
md5,
object_id,
ty: RevType::DeprecatedLocal,
user_id,
}
}
}
impl std::fmt::Debug for Revision {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
let _ = f.write_fmt(format_args!("object_id {}, ", self.object_id))?;
let _ = f.write_fmt(format_args!("base_rev_id {}, ", self.base_rev_id))?;
let _ = f.write_fmt(format_args!("rev_id {}, ", self.rev_id))?;
match RichTextDelta::from_bytes(&self.delta_data) {
Ok(delta) => {
let _ = f.write_fmt(format_args!("delta {:?}", delta.to_delta_str()))?;
}
Err(e) => {
let _ = f.write_fmt(format_args!("delta {:?}", e))?;
}
}
Ok(())
}
}
#[derive(PartialEq, Debug, Default, ProtoBuf, Clone)]
pub struct RepeatedRevision {
#[pb(index = 1)]
items: Vec<Revision>,
}
impl std::ops::Deref for RepeatedRevision {
type Target = Vec<Revision>;
fn deref(&self) -> &Self::Target {
&self.items
}
}
impl std::ops::DerefMut for RepeatedRevision {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.items
}
}
impl std::convert::From<Revision> for RepeatedRevision {
fn from(revision: Revision) -> Self {
Self { items: vec![revision] }
}
}
impl RepeatedRevision {
pub fn new(mut items: Vec<Revision>) -> Self {
items.sort_by(|a, b| a.rev_id.cmp(&b.rev_id));
Self { items }
}
pub fn empty() -> Self {
RepeatedRevision { items: vec![] }
}
pub fn into_inner(self) -> Vec<Revision> {
self.items
}
}
#[derive(Clone, Debug, ProtoBuf, Default)]
pub struct RevId {
#[pb(index = 1)]
pub value: i64,
}
impl AsRef<i64> for RevId {
fn as_ref(&self) -> &i64 {
&self.value
}
}
impl std::convert::From<RevId> for i64 {
fn from(rev_id: RevId) -> Self {
rev_id.value
}
}
impl std::convert::From<i64> for RevId {
fn from(value: i64) -> Self {
RevId { value }
}
}
impl std::fmt::Display for RevId {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_fmt(format_args!("{}", self.value))
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct RevisionRange {
#[pb(index = 1)]
pub start: i64,
#[pb(index = 2)]
pub end: i64,
}
impl std::fmt::Display for RevisionRange {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_fmt(format_args!("[{},{}]", self.start, self.end))
}
}
impl RevisionRange {
pub fn len(&self) -> i64 {
debug_assert!(self.end >= self.start);
if self.end >= self.start {
self.end - self.start + 1
} else {
0
}
}
pub fn is_empty(&self) -> bool {
self.end == self.start
}
pub fn iter(&self) -> RangeInclusive<i64> {
// debug_assert!(self.start != self.end);
RangeInclusive::new(self.start, self.end)
}
pub fn to_rev_ids(&self) -> Vec<i64> {
self.iter().collect::<Vec<_>>()
}
}
#[inline]
pub fn md5<T: AsRef<[u8]>>(data: T) -> String {
let md5 = format!("{:x}", md5::compute(data));
md5
}
#[derive(Debug, ProtoBuf_Enum, Clone, Eq, PartialEq)]
pub enum RevType {
DeprecatedLocal = 0,
DeprecatedRemote = 1,
}
impl std::default::Default for RevType {
fn default() -> Self {
RevType::DeprecatedLocal
}
}
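A sketch of the revision bootstrap flow, assuming the API above; the ids and delta JSON are invented:
use crate::entities::revision::{md5, RepeatedRevision, Revision};
use bytes::Bytes;

fn revision_sketch() {
    let delta_data = Bytes::from(r#"[{"insert":"hello"}]"#);

    // rev_id == base_rev_id == 0 marks the bootstrap revision of an object.
    let first = Revision::initial_revision("user_1", "doc_1", delta_data.clone());
    assert!(first.is_initial());

    let second = Revision::new("doc_1", 0, 1, delta_data.clone(), "user_1", md5(&delta_data));

    // RepeatedRevision::new sorts by rev_id, so the order of the input doesn't matter.
    let revisions = RepeatedRevision::new(vec![second, first]);
    assert_eq!(revisions.first().map(|r| r.rev_id), Some(0));
}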

View File

@ -0,0 +1,117 @@
use crate::{
entities::revision::{RepeatedRevision, Revision},
errors::CollaborateError,
};
use flowy_derive::ProtoBuf;
use lib_ot::{errors::OTError, rich_text::RichTextDelta};
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct CreateTextBlockParams {
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub revisions: RepeatedRevision,
}
#[derive(ProtoBuf, Default, Debug, Clone, Eq, PartialEq)]
pub struct TextBlockInfo {
#[pb(index = 1)]
pub block_id: String,
#[pb(index = 2)]
pub text: String,
#[pb(index = 3)]
pub rev_id: i64,
#[pb(index = 4)]
pub base_rev_id: i64,
}
impl TextBlockInfo {
pub fn delta(&self) -> Result<RichTextDelta, OTError> {
let delta = RichTextDelta::from_bytes(&self.text)?;
Ok(delta)
}
}
impl std::convert::TryFrom<Revision> for TextBlockInfo {
type Error = CollaborateError;
fn try_from(revision: Revision) -> Result<Self, Self::Error> {
if !revision.is_initial() {
return Err(CollaborateError::revision_conflict()
.context("Revision's rev_id should be 0 when creating the document"));
}
let delta = RichTextDelta::from_bytes(&revision.delta_data)?;
let doc_json = delta.to_delta_str();
Ok(TextBlockInfo {
block_id: revision.object_id,
text: doc_json,
rev_id: revision.rev_id,
base_rev_id: revision.base_rev_id,
})
}
}
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct ResetTextBlockParams {
#[pb(index = 1)]
pub block_id: String,
#[pb(index = 2)]
pub revisions: RepeatedRevision,
}
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct TextBlockDelta {
#[pb(index = 1)]
pub block_id: String,
#[pb(index = 2)]
pub delta_str: String,
}
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct NewDocUser {
#[pb(index = 1)]
pub user_id: String,
#[pb(index = 2)]
pub rev_id: i64,
#[pb(index = 3)]
pub doc_id: String,
}
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct TextBlockId {
#[pb(index = 1)]
pub value: String,
}
impl AsRef<str> for TextBlockId {
fn as_ref(&self) -> &str {
&self.value
}
}
impl std::convert::From<String> for TextBlockId {
fn from(value: String) -> Self {
TextBlockId { value }
}
}
impl std::convert::From<TextBlockId> for String {
fn from(block_id: TextBlockId) -> Self {
block_id.value
}
}
impl std::convert::From<&String> for TextBlockId {
fn from(s: &String) -> Self {
TextBlockId { value: s.to_owned() }
}
}
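A sketch of seeding a text block from its first revision, assuming the conversion above; the ids and delta JSON are invented:
use crate::entities::revision::Revision;
use crate::entities::text_block_info::TextBlockInfo;
use crate::errors::CollaborateResult;
use bytes::Bytes;
use std::convert::TryInto;

fn open_block_from_initial_revision() -> CollaborateResult<TextBlockInfo> {
    // Only a rev_id == 0 revision can seed a text block; anything else is a conflict.
    let revision = Revision::initial_revision("user_1", "block_1", Bytes::from(r#"[{"insert":"hello"}]"#));
    let info: TextBlockInfo = revision.try_into()?;
    assert_eq!(info.rev_id, 0);
    Ok(info)
}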

View File

@ -0,0 +1,142 @@
use crate::{
entities::revision::{RepeatedRevision, RevId, Revision, RevisionRange},
errors::CollaborateError,
};
use bytes::Bytes;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use std::convert::{TryFrom, TryInto};
#[derive(Debug, Clone, ProtoBuf_Enum, Eq, PartialEq, Hash)]
pub enum ClientRevisionWSDataType {
ClientPushRev = 0,
ClientPing = 1,
}
impl ClientRevisionWSDataType {
pub fn data<T>(&self, bytes: Bytes) -> Result<T, CollaborateError>
where
T: TryFrom<Bytes, Error = CollaborateError>,
{
T::try_from(bytes)
}
}
impl std::default::Default for ClientRevisionWSDataType {
fn default() -> Self {
ClientRevisionWSDataType::ClientPushRev
}
}
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct ClientRevisionWSData {
#[pb(index = 1)]
pub object_id: String,
#[pb(index = 2)]
pub ty: ClientRevisionWSDataType,
#[pb(index = 3)]
pub revisions: RepeatedRevision,
#[pb(index = 4)]
data_id: String,
}
impl ClientRevisionWSData {
pub fn from_revisions(object_id: &str, revisions: Vec<Revision>) -> Self {
let rev_id = match revisions.first() {
None => 0,
Some(revision) => revision.rev_id,
};
Self {
object_id: object_id.to_owned(),
ty: ClientRevisionWSDataType::ClientPushRev,
revisions: RepeatedRevision::new(revisions),
data_id: rev_id.to_string(),
}
}
pub fn ping(object_id: &str, rev_id: i64) -> Self {
Self {
object_id: object_id.to_owned(),
ty: ClientRevisionWSDataType::ClientPing,
revisions: RepeatedRevision::empty(),
data_id: rev_id.to_string(),
}
}
pub fn id(&self) -> String {
self.data_id.clone()
}
}
#[derive(Debug, Clone, ProtoBuf_Enum, Eq, PartialEq, Hash)]
pub enum ServerRevisionWSDataType {
ServerAck = 0,
ServerPushRev = 1,
ServerPullRev = 2,
UserConnect = 3,
}
impl std::default::Default for ServerRevisionWSDataType {
fn default() -> Self {
ServerRevisionWSDataType::ServerPushRev
}
}
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct ServerRevisionWSData {
#[pb(index = 1)]
pub object_id: String,
#[pb(index = 2)]
pub ty: ServerRevisionWSDataType,
#[pb(index = 3)]
pub data: Vec<u8>,
}
pub struct ServerRevisionWSDataBuilder();
impl ServerRevisionWSDataBuilder {
pub fn build_push_message(object_id: &str, repeated_revision: RepeatedRevision) -> ServerRevisionWSData {
let bytes: Bytes = repeated_revision.try_into().unwrap();
ServerRevisionWSData {
object_id: object_id.to_string(),
ty: ServerRevisionWSDataType::ServerPushRev,
data: bytes.to_vec(),
}
}
pub fn build_pull_message(object_id: &str, range: RevisionRange) -> ServerRevisionWSData {
let bytes: Bytes = range.try_into().unwrap();
ServerRevisionWSData {
object_id: object_id.to_string(),
ty: ServerRevisionWSDataType::ServerPullRev,
data: bytes.to_vec(),
}
}
pub fn build_ack_message(object_id: &str, rev_id: i64) -> ServerRevisionWSData {
let rev_id: RevId = rev_id.into();
let bytes: Bytes = rev_id.try_into().unwrap();
ServerRevisionWSData {
object_id: object_id.to_string(),
ty: ServerRevisionWSDataType::ServerAck,
data: bytes.to_vec(),
}
}
}
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct NewDocumentUser {
#[pb(index = 1)]
pub user_id: String,
#[pb(index = 2)]
pub doc_id: String,
// revision_data: the latest rev_id of the document.
#[pb(index = 3)]
pub revision_data: Vec<u8>,
}
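A sketch of the client/server websocket payloads, assuming the builders above; the ids are invented:
use crate::entities::revision::Revision;
use crate::entities::ws_data::{ClientRevisionWSData, ServerRevisionWSDataBuilder};
use bytes::Bytes;

fn ws_data_sketch() {
    let revision = Revision::initial_revision("user_1", "doc_1", Bytes::from(r#"[{"insert":"hello"}]"#));

    // The client either pushes revisions or pings with the rev_id it already holds;
    // data_id mirrors the first rev_id so acks can be matched back to the request.
    let push = ClientRevisionWSData::from_revisions("doc_1", vec![revision]);
    let ping = ClientRevisionWSData::ping("doc_1", 0);
    assert_eq!(push.id(), ping.id());

    // The server answers with typed payloads built by the same crate.
    let _ack = ServerRevisionWSDataBuilder::build_ack_message("doc_1", 0);
}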

View File

@ -0,0 +1,80 @@
use std::{fmt, fmt::Debug};
use strum_macros::Display;
macro_rules! static_doc_error {
($name:ident, $status:expr) => {
#[allow(non_snake_case, missing_docs)]
pub fn $name() -> CollaborateError {
CollaborateError {
code: $status,
msg: format!("{}", $status),
}
}
};
}
pub type CollaborateResult<T> = std::result::Result<T, CollaborateError>;
#[derive(Debug, Clone)]
pub struct CollaborateError {
pub code: ErrorCode,
pub msg: String,
}
impl CollaborateError {
fn new(code: ErrorCode, msg: &str) -> Self {
Self {
code,
msg: msg.to_owned(),
}
}
pub fn context<T: Debug>(mut self, error: T) -> Self {
self.msg = format!("{:?}", error);
self
}
static_doc_error!(internal, ErrorCode::InternalError);
static_doc_error!(undo, ErrorCode::UndoFail);
static_doc_error!(redo, ErrorCode::RedoFail);
static_doc_error!(out_of_bound, ErrorCode::OutOfBound);
static_doc_error!(record_not_found, ErrorCode::RecordNotFound);
static_doc_error!(revision_conflict, ErrorCode::RevisionConflict);
}
impl fmt::Display for CollaborateError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}: {}", &self.code, &self.msg)
}
}
#[derive(Debug, Clone, Display, PartialEq, Eq)]
pub enum ErrorCode {
DocIdInvalid = 0,
DocNotfound = 1,
UndoFail = 200,
RedoFail = 201,
OutOfBound = 202,
RevisionConflict = 203,
RecordNotFound = 300,
InternalError = 1000,
}
impl std::convert::From<lib_ot::errors::OTError> for CollaborateError {
fn from(error: lib_ot::errors::OTError) -> Self {
CollaborateError::new(ErrorCode::InternalError, "").context(error)
}
}
impl std::convert::From<protobuf::ProtobufError> for CollaborateError {
fn from(e: protobuf::ProtobufError) -> Self {
CollaborateError::internal().context(e)
}
}
pub(crate) fn internal_error<T>(e: T) -> CollaborateError
where
T: std::fmt::Debug,
{
CollaborateError::internal().context(e)
}
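A sketch of the error conventions inside the crate; the function names and messages are invented:
use crate::errors::{internal_error, CollaborateError, CollaborateResult};

// OTError and protobuf::ProtobufError convert automatically via `?`; anything
// else can be wrapped with internal_error or a specific constructor plus context.
fn parse_payload(payload: &str) -> CollaborateResult<serde_json::Value> {
    serde_json::from_str(payload).map_err(internal_error)
}

fn not_found(id: &str) -> CollaborateError {
    CollaborateError::record_not_found().context(format!("no object with id {}", id))
}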

View File

@ -0,0 +1,12 @@
pub mod client_document;
pub mod client_folder;
pub mod client_grid;
pub mod entities;
pub mod errors;
pub mod protobuf;
pub mod server_document;
pub mod server_folder;
pub mod synchronizer;
pub mod util;
pub use lib_ot::rich_text::RichTextDelta;

View File

@ -0,0 +1,4 @@
#![cfg_attr(rustfmt, rustfmt::skip)]
// Auto-generated, do not edit
mod model;
pub use model::*;

View File

@ -0,0 +1,314 @@
// This file is generated by rust-protobuf 2.25.2. Do not edit
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![cfg_attr(rustfmt, rustfmt::skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_imports)]
#![allow(unused_results)]
//! Generated file from `folder_info.proto`
/// Generated files are compatible only with the same version
/// of protobuf runtime.
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_25_2;
#[derive(PartialEq,Clone,Default)]
pub struct FolderInfo {
// message fields
pub folder_id: ::std::string::String,
pub text: ::std::string::String,
pub rev_id: i64,
pub base_rev_id: i64,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a FolderInfo {
fn default() -> &'a FolderInfo {
<FolderInfo as ::protobuf::Message>::default_instance()
}
}
impl FolderInfo {
pub fn new() -> FolderInfo {
::std::default::Default::default()
}
// string folder_id = 1;
pub fn get_folder_id(&self) -> &str {
&self.folder_id
}
pub fn clear_folder_id(&mut self) {
self.folder_id.clear();
}
// Param is passed by value, moved
pub fn set_folder_id(&mut self, v: ::std::string::String) {
self.folder_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_folder_id(&mut self) -> &mut ::std::string::String {
&mut self.folder_id
}
// Take field
pub fn take_folder_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.folder_id, ::std::string::String::new())
}
// string text = 2;
pub fn get_text(&self) -> &str {
&self.text
}
pub fn clear_text(&mut self) {
self.text.clear();
}
// Param is passed by value, moved
pub fn set_text(&mut self, v: ::std::string::String) {
self.text = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_text(&mut self) -> &mut ::std::string::String {
&mut self.text
}
// Take field
pub fn take_text(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.text, ::std::string::String::new())
}
// int64 rev_id = 3;
pub fn get_rev_id(&self) -> i64 {
self.rev_id
}
pub fn clear_rev_id(&mut self) {
self.rev_id = 0;
}
// Param is passed by value, moved
pub fn set_rev_id(&mut self, v: i64) {
self.rev_id = v;
}
// int64 base_rev_id = 4;
pub fn get_base_rev_id(&self) -> i64 {
self.base_rev_id
}
pub fn clear_base_rev_id(&mut self) {
self.base_rev_id = 0;
}
// Param is passed by value, moved
pub fn set_base_rev_id(&mut self, v: i64) {
self.base_rev_id = v;
}
}
impl ::protobuf::Message for FolderInfo {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.folder_id)?;
},
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.text)?;
},
3 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int64()?;
self.rev_id = tmp;
},
4 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int64()?;
self.base_rev_id = tmp;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.folder_id.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.folder_id);
}
if !self.text.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.text);
}
if self.rev_id != 0 {
my_size += ::protobuf::rt::value_size(3, self.rev_id, ::protobuf::wire_format::WireTypeVarint);
}
if self.base_rev_id != 0 {
my_size += ::protobuf::rt::value_size(4, self.base_rev_id, ::protobuf::wire_format::WireTypeVarint);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.folder_id.is_empty() {
os.write_string(1, &self.folder_id)?;
}
if !self.text.is_empty() {
os.write_string(2, &self.text)?;
}
if self.rev_id != 0 {
os.write_int64(3, self.rev_id)?;
}
if self.base_rev_id != 0 {
os.write_int64(4, self.base_rev_id)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> FolderInfo {
FolderInfo::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"folder_id",
|m: &FolderInfo| { &m.folder_id },
|m: &mut FolderInfo| { &mut m.folder_id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"text",
|m: &FolderInfo| { &m.text },
|m: &mut FolderInfo| { &mut m.text },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
"rev_id",
|m: &FolderInfo| { &m.rev_id },
|m: &mut FolderInfo| { &mut m.rev_id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
"base_rev_id",
|m: &FolderInfo| { &m.base_rev_id },
|m: &mut FolderInfo| { &mut m.base_rev_id },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<FolderInfo>(
"FolderInfo",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static FolderInfo {
static instance: ::protobuf::rt::LazyV2<FolderInfo> = ::protobuf::rt::LazyV2::INIT;
instance.get(FolderInfo::new)
}
}
impl ::protobuf::Clear for FolderInfo {
fn clear(&mut self) {
self.folder_id.clear();
self.text.clear();
self.rev_id = 0;
self.base_rev_id = 0;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for FolderInfo {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for FolderInfo {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n\x11folder_info.proto\"t\n\nFolderInfo\x12\x1b\n\tfolder_id\x18\x01\
\x20\x01(\tR\x08folderId\x12\x12\n\x04text\x18\x02\x20\x01(\tR\x04text\
\x12\x15\n\x06rev_id\x18\x03\x20\x01(\x03R\x05revId\x12\x1e\n\x0bbase_re\
v_id\x18\x04\x20\x01(\x03R\tbaseRevIdb\x06proto3\
";
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
file_descriptor_proto_lazy.get(|| {
parse_descriptor_proto()
})
}

View File

@ -0,0 +1,14 @@
#![cfg_attr(rustfmt, rustfmt::skip)]
// Auto-generated, do not edit
mod folder_info;
pub use folder_info::*;
mod ws_data;
pub use ws_data::*;
mod text_block_info;
pub use text_block_info::*;
mod revision;
pub use revision::*;

View File

@ -0,0 +1,992 @@
// This file is generated by rust-protobuf 2.25.2. Do not edit
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![cfg_attr(rustfmt, rustfmt::skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_imports)]
#![allow(unused_results)]
//! Generated file from `revision.proto`
/// Generated files are compatible only with the same version
/// of protobuf runtime.
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_25_2;
#[derive(PartialEq,Clone,Default)]
pub struct Revision {
// message fields
pub base_rev_id: i64,
pub rev_id: i64,
pub delta_data: ::std::vec::Vec<u8>,
pub md5: ::std::string::String,
pub object_id: ::std::string::String,
pub ty: RevType,
pub user_id: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a Revision {
fn default() -> &'a Revision {
<Revision as ::protobuf::Message>::default_instance()
}
}
impl Revision {
pub fn new() -> Revision {
::std::default::Default::default()
}
// int64 base_rev_id = 1;
pub fn get_base_rev_id(&self) -> i64 {
self.base_rev_id
}
pub fn clear_base_rev_id(&mut self) {
self.base_rev_id = 0;
}
// Param is passed by value, moved
pub fn set_base_rev_id(&mut self, v: i64) {
self.base_rev_id = v;
}
// int64 rev_id = 2;
pub fn get_rev_id(&self) -> i64 {
self.rev_id
}
pub fn clear_rev_id(&mut self) {
self.rev_id = 0;
}
// Param is passed by value, moved
pub fn set_rev_id(&mut self, v: i64) {
self.rev_id = v;
}
// bytes delta_data = 3;
pub fn get_delta_data(&self) -> &[u8] {
&self.delta_data
}
pub fn clear_delta_data(&mut self) {
self.delta_data.clear();
}
// Param is passed by value, moved
pub fn set_delta_data(&mut self, v: ::std::vec::Vec<u8>) {
self.delta_data = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_delta_data(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.delta_data
}
// Take field
pub fn take_delta_data(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.delta_data, ::std::vec::Vec::new())
}
// string md5 = 4;
pub fn get_md5(&self) -> &str {
&self.md5
}
pub fn clear_md5(&mut self) {
self.md5.clear();
}
// Param is passed by value, moved
pub fn set_md5(&mut self, v: ::std::string::String) {
self.md5 = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_md5(&mut self) -> &mut ::std::string::String {
&mut self.md5
}
// Take field
pub fn take_md5(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.md5, ::std::string::String::new())
}
// string object_id = 5;
pub fn get_object_id(&self) -> &str {
&self.object_id
}
pub fn clear_object_id(&mut self) {
self.object_id.clear();
}
// Param is passed by value, moved
pub fn set_object_id(&mut self, v: ::std::string::String) {
self.object_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_object_id(&mut self) -> &mut ::std::string::String {
&mut self.object_id
}
// Take field
pub fn take_object_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.object_id, ::std::string::String::new())
}
// .RevType ty = 6;
pub fn get_ty(&self) -> RevType {
self.ty
}
pub fn clear_ty(&mut self) {
self.ty = RevType::DeprecatedLocal;
}
// Param is passed by value, moved
pub fn set_ty(&mut self, v: RevType) {
self.ty = v;
}
// string user_id = 7;
pub fn get_user_id(&self) -> &str {
&self.user_id
}
pub fn clear_user_id(&mut self) {
self.user_id.clear();
}
// Param is passed by value, moved
pub fn set_user_id(&mut self, v: ::std::string::String) {
self.user_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_user_id(&mut self) -> &mut ::std::string::String {
&mut self.user_id
}
// Take field
pub fn take_user_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.user_id, ::std::string::String::new())
}
}
impl ::protobuf::Message for Revision {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int64()?;
self.base_rev_id = tmp;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int64()?;
self.rev_id = tmp;
},
3 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.delta_data)?;
},
4 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.md5)?;
},
5 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.object_id)?;
},
6 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.ty, 6, &mut self.unknown_fields)?
},
7 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.user_id)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.base_rev_id != 0 {
my_size += ::protobuf::rt::value_size(1, self.base_rev_id, ::protobuf::wire_format::WireTypeVarint);
}
if self.rev_id != 0 {
my_size += ::protobuf::rt::value_size(2, self.rev_id, ::protobuf::wire_format::WireTypeVarint);
}
if !self.delta_data.is_empty() {
my_size += ::protobuf::rt::bytes_size(3, &self.delta_data);
}
if !self.md5.is_empty() {
my_size += ::protobuf::rt::string_size(4, &self.md5);
}
if !self.object_id.is_empty() {
my_size += ::protobuf::rt::string_size(5, &self.object_id);
}
if self.ty != RevType::DeprecatedLocal {
my_size += ::protobuf::rt::enum_size(6, self.ty);
}
if !self.user_id.is_empty() {
my_size += ::protobuf::rt::string_size(7, &self.user_id);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if self.base_rev_id != 0 {
os.write_int64(1, self.base_rev_id)?;
}
if self.rev_id != 0 {
os.write_int64(2, self.rev_id)?;
}
if !self.delta_data.is_empty() {
os.write_bytes(3, &self.delta_data)?;
}
if !self.md5.is_empty() {
os.write_string(4, &self.md5)?;
}
if !self.object_id.is_empty() {
os.write_string(5, &self.object_id)?;
}
if self.ty != RevType::DeprecatedLocal {
os.write_enum(6, ::protobuf::ProtobufEnum::value(&self.ty))?;
}
if !self.user_id.is_empty() {
os.write_string(7, &self.user_id)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Revision {
Revision::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
"base_rev_id",
|m: &Revision| { &m.base_rev_id },
|m: &mut Revision| { &mut m.base_rev_id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
"rev_id",
|m: &Revision| { &m.rev_id },
|m: &mut Revision| { &mut m.rev_id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"delta_data",
|m: &Revision| { &m.delta_data },
|m: &mut Revision| { &mut m.delta_data },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"md5",
|m: &Revision| { &m.md5 },
|m: &mut Revision| { &mut m.md5 },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"object_id",
|m: &Revision| { &m.object_id },
|m: &mut Revision| { &mut m.object_id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<RevType>>(
"ty",
|m: &Revision| { &m.ty },
|m: &mut Revision| { &mut m.ty },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"user_id",
|m: &Revision| { &m.user_id },
|m: &mut Revision| { &mut m.user_id },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<Revision>(
"Revision",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static Revision {
static instance: ::protobuf::rt::LazyV2<Revision> = ::protobuf::rt::LazyV2::INIT;
instance.get(Revision::new)
}
}
impl ::protobuf::Clear for Revision {
fn clear(&mut self) {
self.base_rev_id = 0;
self.rev_id = 0;
self.delta_data.clear();
self.md5.clear();
self.object_id.clear();
self.ty = RevType::DeprecatedLocal;
self.user_id.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Revision {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for Revision {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RepeatedRevision {
// message fields
pub items: ::protobuf::RepeatedField<Revision>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RepeatedRevision {
fn default() -> &'a RepeatedRevision {
<RepeatedRevision as ::protobuf::Message>::default_instance()
}
}
impl RepeatedRevision {
pub fn new() -> RepeatedRevision {
::std::default::Default::default()
}
// repeated .Revision items = 1;
pub fn get_items(&self) -> &[Revision] {
&self.items
}
pub fn clear_items(&mut self) {
self.items.clear();
}
// Param is passed by value, moved
pub fn set_items(&mut self, v: ::protobuf::RepeatedField<Revision>) {
self.items = v;
}
// Mutable pointer to the field.
pub fn mut_items(&mut self) -> &mut ::protobuf::RepeatedField<Revision> {
&mut self.items
}
// Take field
pub fn take_items(&mut self) -> ::protobuf::RepeatedField<Revision> {
::std::mem::replace(&mut self.items, ::protobuf::RepeatedField::new())
}
}
impl ::protobuf::Message for RepeatedRevision {
fn is_initialized(&self) -> bool {
for v in &self.items {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.items)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.items {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.items {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RepeatedRevision {
RepeatedRevision::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<Revision>>(
"items",
|m: &RepeatedRevision| { &m.items },
|m: &mut RepeatedRevision| { &mut m.items },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RepeatedRevision>(
"RepeatedRevision",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RepeatedRevision {
static instance: ::protobuf::rt::LazyV2<RepeatedRevision> = ::protobuf::rt::LazyV2::INIT;
instance.get(RepeatedRevision::new)
}
}
impl ::protobuf::Clear for RepeatedRevision {
fn clear(&mut self) {
self.items.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RepeatedRevision {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RepeatedRevision {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RevId {
// message fields
pub value: i64,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RevId {
fn default() -> &'a RevId {
<RevId as ::protobuf::Message>::default_instance()
}
}
impl RevId {
pub fn new() -> RevId {
::std::default::Default::default()
}
// int64 value = 1;
pub fn get_value(&self) -> i64 {
self.value
}
pub fn clear_value(&mut self) {
self.value = 0;
}
// Param is passed by value, moved
pub fn set_value(&mut self, v: i64) {
self.value = v;
}
}
impl ::protobuf::Message for RevId {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int64()?;
self.value = tmp;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.value != 0 {
my_size += ::protobuf::rt::value_size(1, self.value, ::protobuf::wire_format::WireTypeVarint);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if self.value != 0 {
os.write_int64(1, self.value)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RevId {
RevId::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
"value",
|m: &RevId| { &m.value },
|m: &mut RevId| { &mut m.value },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RevId>(
"RevId",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RevId {
static instance: ::protobuf::rt::LazyV2<RevId> = ::protobuf::rt::LazyV2::INIT;
instance.get(RevId::new)
}
}
impl ::protobuf::Clear for RevId {
fn clear(&mut self) {
self.value = 0;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RevId {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RevId {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RevisionRange {
// message fields
pub start: i64,
pub end: i64,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RevisionRange {
fn default() -> &'a RevisionRange {
<RevisionRange as ::protobuf::Message>::default_instance()
}
}
impl RevisionRange {
pub fn new() -> RevisionRange {
::std::default::Default::default()
}
// int64 start = 1;
pub fn get_start(&self) -> i64 {
self.start
}
pub fn clear_start(&mut self) {
self.start = 0;
}
// Param is passed by value, moved
pub fn set_start(&mut self, v: i64) {
self.start = v;
}
// int64 end = 2;
pub fn get_end(&self) -> i64 {
self.end
}
pub fn clear_end(&mut self) {
self.end = 0;
}
// Param is passed by value, moved
pub fn set_end(&mut self, v: i64) {
self.end = v;
}
}
impl ::protobuf::Message for RevisionRange {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int64()?;
self.start = tmp;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int64()?;
self.end = tmp;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.start != 0 {
my_size += ::protobuf::rt::value_size(1, self.start, ::protobuf::wire_format::WireTypeVarint);
}
if self.end != 0 {
my_size += ::protobuf::rt::value_size(2, self.end, ::protobuf::wire_format::WireTypeVarint);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if self.start != 0 {
os.write_int64(1, self.start)?;
}
if self.end != 0 {
os.write_int64(2, self.end)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RevisionRange {
RevisionRange::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
"start",
|m: &RevisionRange| { &m.start },
|m: &mut RevisionRange| { &mut m.start },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
"end",
|m: &RevisionRange| { &m.end },
|m: &mut RevisionRange| { &mut m.end },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<RevisionRange>(
"RevisionRange",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static RevisionRange {
static instance: ::protobuf::rt::LazyV2<RevisionRange> = ::protobuf::rt::LazyV2::INIT;
instance.get(RevisionRange::new)
}
}
impl ::protobuf::Clear for RevisionRange {
fn clear(&mut self) {
self.start = 0;
self.end = 0;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RevisionRange {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RevisionRange {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum RevType {
DeprecatedLocal = 0,
DeprecatedRemote = 1,
}
impl ::protobuf::ProtobufEnum for RevType {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<RevType> {
match value {
0 => ::std::option::Option::Some(RevType::DeprecatedLocal),
1 => ::std::option::Option::Some(RevType::DeprecatedRemote),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [RevType] = &[
RevType::DeprecatedLocal,
RevType::DeprecatedRemote,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<RevType>("RevType", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for RevType {
}
impl ::std::default::Default for RevType {
fn default() -> Self {
RevType::DeprecatedLocal
}
}
impl ::protobuf::reflect::ProtobufValue for RevType {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n\x0erevision.proto\"\xc2\x01\n\x08Revision\x12\x1e\n\x0bbase_rev_id\
\x18\x01\x20\x01(\x03R\tbaseRevId\x12\x15\n\x06rev_id\x18\x02\x20\x01(\
\x03R\x05revId\x12\x1d\n\ndelta_data\x18\x03\x20\x01(\x0cR\tdeltaData\
\x12\x10\n\x03md5\x18\x04\x20\x01(\tR\x03md5\x12\x1b\n\tobject_id\x18\
\x05\x20\x01(\tR\x08objectId\x12\x18\n\x02ty\x18\x06\x20\x01(\x0e2\x08.R\
evTypeR\x02ty\x12\x17\n\x07user_id\x18\x07\x20\x01(\tR\x06userId\"3\n\
\x10RepeatedRevision\x12\x1f\n\x05items\x18\x01\x20\x03(\x0b2\t.Revision\
R\x05items\"\x1d\n\x05RevId\x12\x14\n\x05value\x18\x01\x20\x01(\x03R\x05\
value\"7\n\rRevisionRange\x12\x14\n\x05start\x18\x01\x20\x01(\x03R\x05st\
art\x12\x10\n\x03end\x18\x02\x20\x01(\x03R\x03end*4\n\x07RevType\x12\x13\
\n\x0fDeprecatedLocal\x10\0\x12\x14\n\x10DeprecatedRemote\x10\x01b\x06pr\
oto3\
";
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
file_descriptor_proto_lazy.get(|| {
parse_descriptor_proto()
})
}
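For orientation, a minimal usage sketch of the generated revision types through the rust-protobuf 2.x Message API; it is not part of the generated file, and the values are made-up placeholders.

use protobuf::Message;

fn revision_range_roundtrip() -> protobuf::ProtobufResult<()> {
    // Build a RevisionRange with the generated setters.
    let mut range = RevisionRange::new();
    range.set_start(1);
    range.set_end(3);

    // Serialize to the proto3 wire format and parse it back.
    let bytes = range.write_to_bytes()?;
    let decoded = RevisionRange::parse_from_bytes(&bytes)?;
    assert_eq!(decoded.get_start(), 1);
    assert_eq!(decoded.get_end(), 3);
    Ok(())
}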

File diff suppressed because it is too large

View File

@ -0,0 +1,923 @@
// This file is generated by rust-protobuf 2.25.2. Do not edit
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![cfg_attr(rustfmt, rustfmt::skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_imports)]
#![allow(unused_results)]
//! Generated file from `ws_data.proto`
/// Generated files are compatible only with the same version
/// of protobuf runtime.
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_25_2;
#[derive(PartialEq,Clone,Default)]
pub struct ClientRevisionWSData {
// message fields
pub object_id: ::std::string::String,
pub ty: ClientRevisionWSDataType,
pub revisions: ::protobuf::SingularPtrField<super::revision::RepeatedRevision>,
pub data_id: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ClientRevisionWSData {
fn default() -> &'a ClientRevisionWSData {
<ClientRevisionWSData as ::protobuf::Message>::default_instance()
}
}
impl ClientRevisionWSData {
pub fn new() -> ClientRevisionWSData {
::std::default::Default::default()
}
// string object_id = 1;
pub fn get_object_id(&self) -> &str {
&self.object_id
}
pub fn clear_object_id(&mut self) {
self.object_id.clear();
}
// Param is passed by value, moved
pub fn set_object_id(&mut self, v: ::std::string::String) {
self.object_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_object_id(&mut self) -> &mut ::std::string::String {
&mut self.object_id
}
// Take field
pub fn take_object_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.object_id, ::std::string::String::new())
}
// .ClientRevisionWSDataType ty = 2;
pub fn get_ty(&self) -> ClientRevisionWSDataType {
self.ty
}
pub fn clear_ty(&mut self) {
self.ty = ClientRevisionWSDataType::ClientPushRev;
}
// Param is passed by value, moved
pub fn set_ty(&mut self, v: ClientRevisionWSDataType) {
self.ty = v;
}
// .RepeatedRevision revisions = 3;
pub fn get_revisions(&self) -> &super::revision::RepeatedRevision {
self.revisions.as_ref().unwrap_or_else(|| <super::revision::RepeatedRevision as ::protobuf::Message>::default_instance())
}
pub fn clear_revisions(&mut self) {
self.revisions.clear();
}
pub fn has_revisions(&self) -> bool {
self.revisions.is_some()
}
// Param is passed by value, moved
pub fn set_revisions(&mut self, v: super::revision::RepeatedRevision) {
self.revisions = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_revisions(&mut self) -> &mut super::revision::RepeatedRevision {
if self.revisions.is_none() {
self.revisions.set_default();
}
self.revisions.as_mut().unwrap()
}
// Take field
pub fn take_revisions(&mut self) -> super::revision::RepeatedRevision {
self.revisions.take().unwrap_or_else(|| super::revision::RepeatedRevision::new())
}
// string data_id = 4;
pub fn get_data_id(&self) -> &str {
&self.data_id
}
pub fn clear_data_id(&mut self) {
self.data_id.clear();
}
// Param is passed by value, moved
pub fn set_data_id(&mut self, v: ::std::string::String) {
self.data_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_data_id(&mut self) -> &mut ::std::string::String {
&mut self.data_id
}
// Take field
pub fn take_data_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.data_id, ::std::string::String::new())
}
}
impl ::protobuf::Message for ClientRevisionWSData {
fn is_initialized(&self) -> bool {
for v in &self.revisions {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.object_id)?;
},
2 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.ty, 2, &mut self.unknown_fields)?
},
3 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.revisions)?;
},
4 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.data_id)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.object_id.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.object_id);
}
if self.ty != ClientRevisionWSDataType::ClientPushRev {
my_size += ::protobuf::rt::enum_size(2, self.ty);
}
if let Some(ref v) = self.revisions.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
if !self.data_id.is_empty() {
my_size += ::protobuf::rt::string_size(4, &self.data_id);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.object_id.is_empty() {
os.write_string(1, &self.object_id)?;
}
if self.ty != ClientRevisionWSDataType::ClientPushRev {
os.write_enum(2, ::protobuf::ProtobufEnum::value(&self.ty))?;
}
if let Some(ref v) = self.revisions.as_ref() {
os.write_tag(3, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
if !self.data_id.is_empty() {
os.write_string(4, &self.data_id)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ClientRevisionWSData {
ClientRevisionWSData::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"object_id",
|m: &ClientRevisionWSData| { &m.object_id },
|m: &mut ClientRevisionWSData| { &mut m.object_id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<ClientRevisionWSDataType>>(
"ty",
|m: &ClientRevisionWSData| { &m.ty },
|m: &mut ClientRevisionWSData| { &mut m.ty },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::revision::RepeatedRevision>>(
"revisions",
|m: &ClientRevisionWSData| { &m.revisions },
|m: &mut ClientRevisionWSData| { &mut m.revisions },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"data_id",
|m: &ClientRevisionWSData| { &m.data_id },
|m: &mut ClientRevisionWSData| { &mut m.data_id },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<ClientRevisionWSData>(
"ClientRevisionWSData",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ClientRevisionWSData {
static instance: ::protobuf::rt::LazyV2<ClientRevisionWSData> = ::protobuf::rt::LazyV2::INIT;
instance.get(ClientRevisionWSData::new)
}
}
impl ::protobuf::Clear for ClientRevisionWSData {
fn clear(&mut self) {
self.object_id.clear();
self.ty = ClientRevisionWSDataType::ClientPushRev;
self.revisions.clear();
self.data_id.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ClientRevisionWSData {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ClientRevisionWSData {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct ServerRevisionWSData {
// message fields
pub object_id: ::std::string::String,
pub ty: ServerRevisionWSDataType,
pub data: ::std::vec::Vec<u8>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ServerRevisionWSData {
fn default() -> &'a ServerRevisionWSData {
<ServerRevisionWSData as ::protobuf::Message>::default_instance()
}
}
impl ServerRevisionWSData {
pub fn new() -> ServerRevisionWSData {
::std::default::Default::default()
}
// string object_id = 1;
pub fn get_object_id(&self) -> &str {
&self.object_id
}
pub fn clear_object_id(&mut self) {
self.object_id.clear();
}
// Param is passed by value, moved
pub fn set_object_id(&mut self, v: ::std::string::String) {
self.object_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_object_id(&mut self) -> &mut ::std::string::String {
&mut self.object_id
}
// Take field
pub fn take_object_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.object_id, ::std::string::String::new())
}
// .ServerRevisionWSDataType ty = 2;
pub fn get_ty(&self) -> ServerRevisionWSDataType {
self.ty
}
pub fn clear_ty(&mut self) {
self.ty = ServerRevisionWSDataType::ServerAck;
}
// Param is passed by value, moved
pub fn set_ty(&mut self, v: ServerRevisionWSDataType) {
self.ty = v;
}
// bytes data = 3;
pub fn get_data(&self) -> &[u8] {
&self.data
}
pub fn clear_data(&mut self) {
self.data.clear();
}
// Param is passed by value, moved
pub fn set_data(&mut self, v: ::std::vec::Vec<u8>) {
self.data = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_data(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.data
}
// Take field
pub fn take_data(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.data, ::std::vec::Vec::new())
}
}
impl ::protobuf::Message for ServerRevisionWSData {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.object_id)?;
},
2 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.ty, 2, &mut self.unknown_fields)?
},
3 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.data)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.object_id.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.object_id);
}
if self.ty != ServerRevisionWSDataType::ServerAck {
my_size += ::protobuf::rt::enum_size(2, self.ty);
}
if !self.data.is_empty() {
my_size += ::protobuf::rt::bytes_size(3, &self.data);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.object_id.is_empty() {
os.write_string(1, &self.object_id)?;
}
if self.ty != ServerRevisionWSDataType::ServerAck {
os.write_enum(2, ::protobuf::ProtobufEnum::value(&self.ty))?;
}
if !self.data.is_empty() {
os.write_bytes(3, &self.data)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ServerRevisionWSData {
ServerRevisionWSData::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"object_id",
|m: &ServerRevisionWSData| { &m.object_id },
|m: &mut ServerRevisionWSData| { &mut m.object_id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<ServerRevisionWSDataType>>(
"ty",
|m: &ServerRevisionWSData| { &m.ty },
|m: &mut ServerRevisionWSData| { &mut m.ty },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"data",
|m: &ServerRevisionWSData| { &m.data },
|m: &mut ServerRevisionWSData| { &mut m.data },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<ServerRevisionWSData>(
"ServerRevisionWSData",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ServerRevisionWSData {
static instance: ::protobuf::rt::LazyV2<ServerRevisionWSData> = ::protobuf::rt::LazyV2::INIT;
instance.get(ServerRevisionWSData::new)
}
}
impl ::protobuf::Clear for ServerRevisionWSData {
fn clear(&mut self) {
self.object_id.clear();
self.ty = ServerRevisionWSDataType::ServerAck;
self.data.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ServerRevisionWSData {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ServerRevisionWSData {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct NewDocumentUser {
// message fields
pub user_id: ::std::string::String,
pub doc_id: ::std::string::String,
pub revision_data: ::std::vec::Vec<u8>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a NewDocumentUser {
fn default() -> &'a NewDocumentUser {
<NewDocumentUser as ::protobuf::Message>::default_instance()
}
}
impl NewDocumentUser {
pub fn new() -> NewDocumentUser {
::std::default::Default::default()
}
// string user_id = 1;
pub fn get_user_id(&self) -> &str {
&self.user_id
}
pub fn clear_user_id(&mut self) {
self.user_id.clear();
}
// Param is passed by value, moved
pub fn set_user_id(&mut self, v: ::std::string::String) {
self.user_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_user_id(&mut self) -> &mut ::std::string::String {
&mut self.user_id
}
// Take field
pub fn take_user_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.user_id, ::std::string::String::new())
}
// string doc_id = 2;
pub fn get_doc_id(&self) -> &str {
&self.doc_id
}
pub fn clear_doc_id(&mut self) {
self.doc_id.clear();
}
// Param is passed by value, moved
pub fn set_doc_id(&mut self, v: ::std::string::String) {
self.doc_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_doc_id(&mut self) -> &mut ::std::string::String {
&mut self.doc_id
}
// Take field
pub fn take_doc_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.doc_id, ::std::string::String::new())
}
// bytes revision_data = 3;
pub fn get_revision_data(&self) -> &[u8] {
&self.revision_data
}
pub fn clear_revision_data(&mut self) {
self.revision_data.clear();
}
// Param is passed by value, moved
pub fn set_revision_data(&mut self, v: ::std::vec::Vec<u8>) {
self.revision_data = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_revision_data(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.revision_data
}
// Take field
pub fn take_revision_data(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.revision_data, ::std::vec::Vec::new())
}
}
impl ::protobuf::Message for NewDocumentUser {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.user_id)?;
},
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.doc_id)?;
},
3 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.revision_data)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.user_id.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.user_id);
}
if !self.doc_id.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.doc_id);
}
if !self.revision_data.is_empty() {
my_size += ::protobuf::rt::bytes_size(3, &self.revision_data);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.user_id.is_empty() {
os.write_string(1, &self.user_id)?;
}
if !self.doc_id.is_empty() {
os.write_string(2, &self.doc_id)?;
}
if !self.revision_data.is_empty() {
os.write_bytes(3, &self.revision_data)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> NewDocumentUser {
NewDocumentUser::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"user_id",
|m: &NewDocumentUser| { &m.user_id },
|m: &mut NewDocumentUser| { &mut m.user_id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"doc_id",
|m: &NewDocumentUser| { &m.doc_id },
|m: &mut NewDocumentUser| { &mut m.doc_id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"revision_data",
|m: &NewDocumentUser| { &m.revision_data },
|m: &mut NewDocumentUser| { &mut m.revision_data },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<NewDocumentUser>(
"NewDocumentUser",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static NewDocumentUser {
static instance: ::protobuf::rt::LazyV2<NewDocumentUser> = ::protobuf::rt::LazyV2::INIT;
instance.get(NewDocumentUser::new)
}
}
impl ::protobuf::Clear for NewDocumentUser {
fn clear(&mut self) {
self.user_id.clear();
self.doc_id.clear();
self.revision_data.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for NewDocumentUser {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for NewDocumentUser {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum ClientRevisionWSDataType {
ClientPushRev = 0,
ClientPing = 1,
}
impl ::protobuf::ProtobufEnum for ClientRevisionWSDataType {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<ClientRevisionWSDataType> {
match value {
0 => ::std::option::Option::Some(ClientRevisionWSDataType::ClientPushRev),
1 => ::std::option::Option::Some(ClientRevisionWSDataType::ClientPing),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [ClientRevisionWSDataType] = &[
ClientRevisionWSDataType::ClientPushRev,
ClientRevisionWSDataType::ClientPing,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<ClientRevisionWSDataType>("ClientRevisionWSDataType", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for ClientRevisionWSDataType {
}
impl ::std::default::Default for ClientRevisionWSDataType {
fn default() -> Self {
ClientRevisionWSDataType::ClientPushRev
}
}
impl ::protobuf::reflect::ProtobufValue for ClientRevisionWSDataType {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum ServerRevisionWSDataType {
ServerAck = 0,
ServerPushRev = 1,
ServerPullRev = 2,
UserConnect = 3,
}
impl ::protobuf::ProtobufEnum for ServerRevisionWSDataType {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<ServerRevisionWSDataType> {
match value {
0 => ::std::option::Option::Some(ServerRevisionWSDataType::ServerAck),
1 => ::std::option::Option::Some(ServerRevisionWSDataType::ServerPushRev),
2 => ::std::option::Option::Some(ServerRevisionWSDataType::ServerPullRev),
3 => ::std::option::Option::Some(ServerRevisionWSDataType::UserConnect),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [ServerRevisionWSDataType] = &[
ServerRevisionWSDataType::ServerAck,
ServerRevisionWSDataType::ServerPushRev,
ServerRevisionWSDataType::ServerPullRev,
ServerRevisionWSDataType::UserConnect,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<ServerRevisionWSDataType>("ServerRevisionWSDataType", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for ServerRevisionWSDataType {
}
impl ::std::default::Default for ServerRevisionWSDataType {
fn default() -> Self {
ServerRevisionWSDataType::ServerAck
}
}
impl ::protobuf::reflect::ProtobufValue for ServerRevisionWSDataType {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n\rws_data.proto\x1a\x0erevision.proto\"\xa8\x01\n\x14ClientRevisionWSD\
ata\x12\x1b\n\tobject_id\x18\x01\x20\x01(\tR\x08objectId\x12)\n\x02ty\
\x18\x02\x20\x01(\x0e2\x19.ClientRevisionWSDataTypeR\x02ty\x12/\n\trevis\
ions\x18\x03\x20\x01(\x0b2\x11.RepeatedRevisionR\trevisions\x12\x17\n\
\x07data_id\x18\x04\x20\x01(\tR\x06dataId\"r\n\x14ServerRevisionWSData\
\x12\x1b\n\tobject_id\x18\x01\x20\x01(\tR\x08objectId\x12)\n\x02ty\x18\
\x02\x20\x01(\x0e2\x19.ServerRevisionWSDataTypeR\x02ty\x12\x12\n\x04data\
\x18\x03\x20\x01(\x0cR\x04data\"f\n\x0fNewDocumentUser\x12\x17\n\x07user\
_id\x18\x01\x20\x01(\tR\x06userId\x12\x15\n\x06doc_id\x18\x02\x20\x01(\t\
R\x05docId\x12#\n\rrevision_data\x18\x03\x20\x01(\x0cR\x0crevisionData*=\
\n\x18ClientRevisionWSDataType\x12\x11\n\rClientPushRev\x10\0\x12\x0e\n\
\nClientPing\x10\x01*`\n\x18ServerRevisionWSDataType\x12\r\n\tServerAck\
\x10\0\x12\x11\n\rServerPushRev\x10\x01\x12\x11\n\rServerPullRev\x10\x02\
\x12\x0f\n\x0bUserConnect\x10\x03b\x06proto3\
";
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
file_descriptor_proto_lazy.get(|| {
parse_descriptor_proto()
})
}
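An assumed sketch (not part of the generated file) of composing a ClientRevisionWSData push message with the generated setters; the ids are placeholders and the caller supplies whatever RepeatedRevision it already has.

use protobuf::Message;

fn build_push_ws_data(revisions: super::revision::RepeatedRevision) -> protobuf::ProtobufResult<Vec<u8>> {
    let mut data = ClientRevisionWSData::new();
    data.set_object_id("object-1".to_owned()); // placeholder object id
    data.set_ty(ClientRevisionWSDataType::ClientPushRev); // push revisions to the server
    data.set_revisions(revisions); // stored in a SingularPtrField
    data.set_data_id("1".to_owned()); // placeholder ack id
    data.write_to_bytes() // proto3 wire encoding
}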

View File

@ -0,0 +1,8 @@
syntax = "proto3";
message FolderInfo {
string folder_id = 1;
string text = 2;
int64 rev_id = 3;
int64 base_rev_id = 4;
}

View File

@ -0,0 +1,25 @@
syntax = "proto3";
message Revision {
int64 base_rev_id = 1;
int64 rev_id = 2;
bytes delta_data = 3;
string md5 = 4;
string object_id = 5;
RevType ty = 6;
string user_id = 7;
}
message RepeatedRevision {
repeated Revision items = 1;
}
message RevId {
int64 value = 1;
}
message RevisionRange {
int64 start = 1;
int64 end = 2;
}
enum RevType {
DeprecatedLocal = 0;
DeprecatedRemote = 1;
}

View File

@ -0,0 +1,29 @@
syntax = "proto3";
import "revision.proto";
message CreateTextBlockParams {
string id = 1;
RepeatedRevision revisions = 2;
}
message TextBlockInfo {
string block_id = 1;
string text = 2;
int64 rev_id = 3;
int64 base_rev_id = 4;
}
message ResetTextBlockParams {
string block_id = 1;
RepeatedRevision revisions = 2;
}
message TextBlockDelta {
string block_id = 1;
string delta_str = 2;
}
message NewDocUser {
string user_id = 1;
int64 rev_id = 2;
string doc_id = 3;
}
message TextBlockId {
string value = 1;
}

View File

@ -0,0 +1,29 @@
syntax = "proto3";
import "revision.proto";
message ClientRevisionWSData {
string object_id = 1;
ClientRevisionWSDataType ty = 2;
RepeatedRevision revisions = 3;
string data_id = 4;
}
message ServerRevisionWSData {
string object_id = 1;
ServerRevisionWSDataType ty = 2;
bytes data = 3;
}
message NewDocumentUser {
string user_id = 1;
string doc_id = 2;
bytes revision_data = 3;
}
enum ClientRevisionWSDataType {
ClientPushRev = 0;
ClientPing = 1;
}
enum ServerRevisionWSDataType {
ServerAck = 0;
ServerPushRev = 1;
ServerPullRev = 2;
UserConnect = 3;
}

View File

@ -0,0 +1,362 @@
use crate::{
entities::{text_block_info::TextBlockInfo, ws_data::ServerRevisionWSDataBuilder},
errors::{internal_error, CollaborateError, CollaborateResult},
protobuf::{ClientRevisionWSData, RepeatedRevision as RepeatedRevisionPB, Revision as RevisionPB},
server_document::document_pad::ServerDocument,
synchronizer::{RevisionSyncPersistence, RevisionSyncResponse, RevisionSynchronizer, RevisionUser},
util::rev_id_from_str,
};
use async_stream::stream;
use dashmap::DashMap;
use futures::stream::StreamExt;
use lib_infra::future::BoxResultFuture;
use lib_ot::rich_text::{RichTextAttributes, RichTextDelta};
use std::{collections::HashMap, fmt::Debug, sync::Arc};
use tokio::{
sync::{mpsc, oneshot, RwLock},
task::spawn_blocking,
};
pub trait TextBlockCloudPersistence: Send + Sync + Debug {
fn read_text_block(&self, doc_id: &str) -> BoxResultFuture<TextBlockInfo, CollaborateError>;
fn create_text_block(
&self,
doc_id: &str,
repeated_revision: RepeatedRevisionPB,
) -> BoxResultFuture<Option<TextBlockInfo>, CollaborateError>;
fn read_text_block_revisions(
&self,
doc_id: &str,
rev_ids: Option<Vec<i64>>,
) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError>;
fn save_text_block_revisions(&self, repeated_revision: RepeatedRevisionPB)
-> BoxResultFuture<(), CollaborateError>;
fn reset_text_block(
&self,
doc_id: &str,
repeated_revision: RepeatedRevisionPB,
) -> BoxResultFuture<(), CollaborateError>;
}
impl RevisionSyncPersistence for Arc<dyn TextBlockCloudPersistence> {
fn read_revisions(
&self,
object_id: &str,
rev_ids: Option<Vec<i64>>,
) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError> {
(**self).read_text_block_revisions(object_id, rev_ids)
}
fn save_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError> {
(**self).save_text_block_revisions(repeated_revision)
}
fn reset_object(
&self,
object_id: &str,
repeated_revision: RepeatedRevisionPB,
) -> BoxResultFuture<(), CollaborateError> {
(**self).reset_text_block(object_id, repeated_revision)
}
}
pub struct ServerDocumentManager {
document_handlers: Arc<RwLock<HashMap<String, Arc<OpenDocumentHandler>>>>,
persistence: Arc<dyn TextBlockCloudPersistence>,
}
impl ServerDocumentManager {
pub fn new(persistence: Arc<dyn TextBlockCloudPersistence>) -> Self {
Self {
document_handlers: Arc::new(RwLock::new(HashMap::new())),
persistence,
}
}
pub async fn handle_client_revisions(
&self,
user: Arc<dyn RevisionUser>,
mut client_data: ClientRevisionWSData,
) -> Result<(), CollaborateError> {
let repeated_revision = client_data.take_revisions();
let cloned_user = user.clone();
let ack_id = rev_id_from_str(&client_data.data_id)?;
let object_id = client_data.object_id;
let result = match self.get_document_handler(&object_id).await {
None => {
tracing::trace!("Can't find the document. Creating the document {}", object_id);
let _ = self.create_document(&object_id, repeated_revision).await.map_err(|e| {
CollaborateError::internal().context(format!("Server create document failed: {}", e))
})?;
Ok(())
}
Some(handler) => {
let _ = handler.apply_revisions(user, repeated_revision).await?;
Ok(())
}
};
if result.is_ok() {
cloned_user.receive(RevisionSyncResponse::Ack(
ServerRevisionWSDataBuilder::build_ack_message(&object_id, ack_id),
));
}
result
}
pub async fn handle_client_ping(
&self,
user: Arc<dyn RevisionUser>,
client_data: ClientRevisionWSData,
) -> Result<(), CollaborateError> {
let rev_id = rev_id_from_str(&client_data.data_id)?;
let doc_id = client_data.object_id.clone();
match self.get_document_handler(&doc_id).await {
None => {
tracing::trace!("Document:{} doesn't exist, ignore client ping", doc_id);
Ok(())
}
Some(handler) => {
let _ = handler.apply_ping(rev_id, user).await?;
Ok(())
}
}
}
pub async fn handle_document_reset(
&self,
doc_id: &str,
mut repeated_revision: RepeatedRevisionPB,
) -> Result<(), CollaborateError> {
repeated_revision.mut_items().sort_by(|a, b| a.rev_id.cmp(&b.rev_id));
match self.get_document_handler(doc_id).await {
None => {
tracing::warn!("Document:{} doesn't exist, ignore document reset", doc_id);
Ok(())
}
Some(handler) => {
let _ = handler.apply_document_reset(repeated_revision).await?;
Ok(())
}
}
}
async fn get_document_handler(&self, doc_id: &str) -> Option<Arc<OpenDocumentHandler>> {
if let Some(handler) = self.document_handlers.read().await.get(doc_id).cloned() {
return Some(handler);
}
let mut write_guard = self.document_handlers.write().await;
match self.persistence.read_text_block(doc_id).await {
Ok(doc) => {
let handler = self.create_document_handler(doc).await.map_err(internal_error).unwrap();
write_guard.insert(doc_id.to_owned(), handler.clone());
drop(write_guard);
Some(handler)
}
Err(_) => None,
}
}
#[tracing::instrument(level = "debug", skip(self, repeated_revision), err)]
async fn create_document(
&self,
doc_id: &str,
repeated_revision: RepeatedRevisionPB,
) -> Result<Arc<OpenDocumentHandler>, CollaborateError> {
match self.persistence.create_text_block(doc_id, repeated_revision).await? {
None => Err(CollaborateError::internal().context("Create document info from revisions failed")),
Some(doc) => {
let handler = self.create_document_handler(doc).await?;
self.document_handlers
.write()
.await
.insert(doc_id.to_owned(), handler.clone());
Ok(handler)
}
}
}
async fn create_document_handler(&self, doc: TextBlockInfo) -> Result<Arc<OpenDocumentHandler>, CollaborateError> {
let persistence = self.persistence.clone();
let handle = spawn_blocking(|| OpenDocumentHandler::new(doc, persistence))
.await
.map_err(|e| CollaborateError::internal().context(format!("Create document handler failed: {}", e)))?;
Ok(Arc::new(handle?))
}
}
impl std::ops::Drop for ServerDocumentManager {
fn drop(&mut self) {
log::trace!("ServerDocumentManager was dropped");
}
}
type DocumentRevisionSynchronizer = RevisionSynchronizer<RichTextAttributes>;
struct OpenDocumentHandler {
doc_id: String,
sender: mpsc::Sender<DocumentCommand>,
users: DashMap<String, Arc<dyn RevisionUser>>,
}
impl OpenDocumentHandler {
fn new(doc: TextBlockInfo, persistence: Arc<dyn TextBlockCloudPersistence>) -> Result<Self, CollaborateError> {
let doc_id = doc.block_id.clone();
let (sender, receiver) = mpsc::channel(1000);
let users = DashMap::new();
let delta = RichTextDelta::from_bytes(&doc.text)?;
let sync_object = ServerDocument::from_delta(&doc_id, delta);
let synchronizer = Arc::new(DocumentRevisionSynchronizer::new(doc.rev_id, sync_object, persistence));
let queue = DocumentCommandRunner::new(&doc.block_id, receiver, synchronizer);
tokio::task::spawn(queue.run());
Ok(Self { doc_id, sender, users })
}
#[tracing::instrument(
name = "server_document_apply_revision",
level = "trace",
skip(self, user, repeated_revision),
err
)]
async fn apply_revisions(
&self,
user: Arc<dyn RevisionUser>,
repeated_revision: RepeatedRevisionPB,
) -> Result<(), CollaborateError> {
let (ret, rx) = oneshot::channel();
self.users.insert(user.user_id(), user.clone());
let msg = DocumentCommand::ApplyRevisions {
user,
repeated_revision,
ret,
};
self.send(msg, rx).await?
}
async fn apply_ping(&self, rev_id: i64, user: Arc<dyn RevisionUser>) -> Result<(), CollaborateError> {
let (ret, rx) = oneshot::channel();
self.users.insert(user.user_id(), user.clone());
let msg = DocumentCommand::Ping { user, rev_id, ret };
self.send(msg, rx).await?
}
#[tracing::instrument(level = "debug", skip(self, repeated_revision), err)]
async fn apply_document_reset(&self, repeated_revision: RepeatedRevisionPB) -> Result<(), CollaborateError> {
let (ret, rx) = oneshot::channel();
let msg = DocumentCommand::Reset { repeated_revision, ret };
self.send(msg, rx).await?
}
async fn send<T>(&self, msg: DocumentCommand, rx: oneshot::Receiver<T>) -> CollaborateResult<T> {
let _ = self
.sender
.send(msg)
.await
.map_err(|e| CollaborateError::internal().context(format!("Send document command failed: {}", e)))?;
Ok(rx.await.map_err(internal_error)?)
}
}
impl std::ops::Drop for OpenDocumentHandler {
fn drop(&mut self) {
tracing::trace!("{} OpenDocHandle was dropped", self.doc_id);
}
}
// #[derive(Debug)]
enum DocumentCommand {
ApplyRevisions {
user: Arc<dyn RevisionUser>,
repeated_revision: RepeatedRevisionPB,
ret: oneshot::Sender<CollaborateResult<()>>,
},
Ping {
user: Arc<dyn RevisionUser>,
rev_id: i64,
ret: oneshot::Sender<CollaborateResult<()>>,
},
Reset {
repeated_revision: RepeatedRevisionPB,
ret: oneshot::Sender<CollaborateResult<()>>,
},
}
struct DocumentCommandRunner {
pub doc_id: String,
receiver: Option<mpsc::Receiver<DocumentCommand>>,
synchronizer: Arc<DocumentRevisionSynchronizer>,
}
impl DocumentCommandRunner {
fn new(
doc_id: &str,
receiver: mpsc::Receiver<DocumentCommand>,
synchronizer: Arc<DocumentRevisionSynchronizer>,
) -> Self {
Self {
doc_id: doc_id.to_owned(),
receiver: Some(receiver),
synchronizer,
}
}
async fn run(mut self) {
let mut receiver = self
.receiver
.take()
.expect("DocumentCommandRunner's receiver should only take one time");
let stream = stream! {
loop {
match receiver.recv().await {
Some(msg) => yield msg,
None => break,
}
}
};
stream.for_each(|msg| self.handle_message(msg)).await;
}
async fn handle_message(&self, msg: DocumentCommand) {
match msg {
DocumentCommand::ApplyRevisions {
user,
repeated_revision,
ret,
} => {
let result = self
.synchronizer
.sync_revisions(user, repeated_revision)
.await
.map_err(internal_error);
let _ = ret.send(result);
}
DocumentCommand::Ping { user, rev_id, ret } => {
let result = self.synchronizer.pong(user, rev_id).await.map_err(internal_error);
let _ = ret.send(result);
}
DocumentCommand::Reset { repeated_revision, ret } => {
let result = self.synchronizer.reset(repeated_revision).await.map_err(internal_error);
let _ = ret.send(result);
}
}
}
}
impl std::ops::Drop for DocumentCommandRunner {
fn drop(&mut self) {
tracing::trace!("{} DocumentCommandQueue was dropped", self.doc_id);
}
}
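For illustration only, a minimal RevisionUser sketch that forwards the synchronizer's responses over a tokio channel; the struct name and channel wiring are hypothetical, while the trait paths are the ones imported at the top of this file.

use tokio::sync::mpsc;
use crate::synchronizer::{RevisionSyncResponse, RevisionUser};

#[derive(Debug)]
struct ChannelRevisionUser {
    user_id: String,
    sender: mpsc::UnboundedSender<RevisionSyncResponse>,
}

impl RevisionUser for ChannelRevisionUser {
    fn user_id(&self) -> String {
        self.user_id.clone()
    }

    fn receive(&self, resp: RevisionSyncResponse) {
        // Forward Ack/Push/Pull responses back to the websocket layer.
        let _ = self.sender.send(resp);
    }
}

An Arc of such a user, together with the incoming ClientRevisionWSData, is what handle_client_revisions and handle_client_ping expect.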

View File

@ -0,0 +1,48 @@
use crate::{client_document::InitialDocumentText, errors::CollaborateError, synchronizer::RevisionSyncObject};
use lib_ot::{
core::*,
rich_text::{RichTextAttributes, RichTextDelta},
};
pub struct ServerDocument {
doc_id: String,
delta: RichTextDelta,
}
impl ServerDocument {
#[allow(dead_code)]
pub fn new<C: InitialDocumentText>(doc_id: &str) -> Self {
Self::from_delta(doc_id, C::initial_delta())
}
pub fn from_delta(doc_id: &str, delta: RichTextDelta) -> Self {
let doc_id = doc_id.to_owned();
ServerDocument { doc_id, delta }
}
}
impl RevisionSyncObject<RichTextAttributes> for ServerDocument {
fn id(&self) -> &str {
&self.doc_id
}
fn compose(&mut self, other: &RichTextDelta) -> Result<(), CollaborateError> {
// tracing::trace!("{} compose {}", &self.delta.to_json(), other.to_json());
let new_delta = self.delta.compose(other)?;
self.delta = new_delta;
Ok(())
}
fn transform(&self, other: &RichTextDelta) -> Result<(RichTextDelta, RichTextDelta), CollaborateError> {
let value = self.delta.transform(other)?;
Ok(value)
}
fn to_json(&self) -> String {
self.delta.to_delta_str()
}
fn set_delta(&mut self, new_delta: Delta<RichTextAttributes>) {
self.delta = new_delta;
}
}
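A small assumed sketch of the compose step this object implements, applying an incoming client delta on top of the server copy; the free function is hypothetical.

use lib_ot::rich_text::RichTextDelta;
use crate::{errors::CollaborateError, synchronizer::RevisionSyncObject};

fn apply_client_delta(doc: &mut ServerDocument, client_delta: &RichTextDelta) -> Result<(), CollaborateError> {
    // RevisionSyncObject::compose replaces the server delta with the composition
    // of the current server delta and the client delta.
    doc.compose(client_delta)
}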

View File

@ -0,0 +1,4 @@
mod document_manager;
mod document_pad;
pub use document_manager::*;

View File

@ -0,0 +1,332 @@
use crate::{
entities::{
folder_info::{FolderDelta, FolderInfo},
ws_data::ServerRevisionWSDataBuilder,
},
errors::{internal_error, CollaborateError, CollaborateResult},
protobuf::{ClientRevisionWSData, RepeatedRevision as RepeatedRevisionPB, Revision as RevisionPB},
server_folder::folder_pad::ServerFolder,
synchronizer::{RevisionSyncPersistence, RevisionSyncResponse, RevisionSynchronizer, RevisionUser},
util::rev_id_from_str,
};
use async_stream::stream;
use futures::stream::StreamExt;
use lib_infra::future::BoxResultFuture;
use lib_ot::core::PlainTextAttributes;
use std::{collections::HashMap, fmt::Debug, sync::Arc};
use tokio::{
sync::{mpsc, oneshot, RwLock},
task::spawn_blocking,
};
pub trait FolderCloudPersistence: Send + Sync + Debug {
fn read_folder(&self, user_id: &str, folder_id: &str) -> BoxResultFuture<FolderInfo, CollaborateError>;
fn create_folder(
&self,
user_id: &str,
folder_id: &str,
repeated_revision: RepeatedRevisionPB,
) -> BoxResultFuture<Option<FolderInfo>, CollaborateError>;
fn save_folder_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError>;
fn read_folder_revisions(
&self,
folder_id: &str,
rev_ids: Option<Vec<i64>>,
) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError>;
fn reset_folder(
&self,
folder_id: &str,
repeated_revision: RepeatedRevisionPB,
) -> BoxResultFuture<(), CollaborateError>;
}
impl RevisionSyncPersistence for Arc<dyn FolderCloudPersistence> {
fn read_revisions(
&self,
object_id: &str,
rev_ids: Option<Vec<i64>>,
) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError> {
(**self).read_folder_revisions(object_id, rev_ids)
}
fn save_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError> {
(**self).save_folder_revisions(repeated_revision)
}
fn reset_object(
&self,
object_id: &str,
repeated_revision: RepeatedRevisionPB,
) -> BoxResultFuture<(), CollaborateError> {
(**self).reset_folder(object_id, repeated_revision)
}
}
pub struct ServerFolderManager {
folder_handlers: Arc<RwLock<HashMap<String, Arc<OpenFolderHandler>>>>,
persistence: Arc<dyn FolderCloudPersistence>,
}
impl ServerFolderManager {
pub fn new(persistence: Arc<dyn FolderCloudPersistence>) -> Self {
Self {
folder_handlers: Arc::new(RwLock::new(HashMap::new())),
persistence,
}
}
pub async fn handle_client_revisions(
&self,
user: Arc<dyn RevisionUser>,
mut client_data: ClientRevisionWSData,
) -> Result<(), CollaborateError> {
let repeated_revision = client_data.take_revisions();
let cloned_user = user.clone();
let ack_id = rev_id_from_str(&client_data.data_id)?;
let folder_id = client_data.object_id;
let user_id = user.user_id();
let result = match self.get_folder_handler(&user_id, &folder_id).await {
None => {
let _ = self
.create_folder(&user_id, &folder_id, repeated_revision)
.await
.map_err(|e| CollaborateError::internal().context(format!("Server create folder failed: {}", e)))?;
Ok(())
}
Some(handler) => {
let _ = handler.apply_revisions(user, repeated_revision).await?;
Ok(())
}
};
if result.is_ok() {
cloned_user.receive(RevisionSyncResponse::Ack(
ServerRevisionWSDataBuilder::build_ack_message(&folder_id, ack_id),
));
}
result
}
pub async fn handle_client_ping(
&self,
user: Arc<dyn RevisionUser>,
client_data: ClientRevisionWSData,
) -> Result<(), CollaborateError> {
let user_id = user.user_id();
let rev_id = rev_id_from_str(&client_data.data_id)?;
let folder_id = client_data.object_id.clone();
match self.get_folder_handler(&user_id, &folder_id).await {
None => {
tracing::trace!("Folder:{} doesn't exist, ignore client ping", folder_id);
Ok(())
}
Some(handler) => {
let _ = handler.apply_ping(rev_id, user).await?;
Ok(())
}
}
}
async fn get_folder_handler(&self, user_id: &str, folder_id: &str) -> Option<Arc<OpenFolderHandler>> {
let folder_id = folder_id.to_owned();
if let Some(handler) = self.folder_handlers.read().await.get(&folder_id).cloned() {
return Some(handler);
}
let mut write_guard = self.folder_handlers.write().await;
match self.persistence.read_folder(user_id, &folder_id).await {
Ok(folder_info) => {
let handler = self
.create_folder_handler(folder_info)
.await
.map_err(internal_error)
.unwrap();
write_guard.insert(folder_id, handler.clone());
drop(write_guard);
Some(handler)
}
Err(_) => None,
}
}
async fn create_folder_handler(&self, folder_info: FolderInfo) -> Result<Arc<OpenFolderHandler>, CollaborateError> {
let persistence = self.persistence.clone();
let handle = spawn_blocking(|| OpenFolderHandler::new(folder_info, persistence))
.await
.map_err(|e| CollaborateError::internal().context(format!("Create folder handler failed: {}", e)))?;
Ok(Arc::new(handle?))
}
#[tracing::instrument(level = "debug", skip(self, repeated_revision), err)]
async fn create_folder(
&self,
user_id: &str,
folder_id: &str,
repeated_revision: RepeatedRevisionPB,
) -> Result<Arc<OpenFolderHandler>, CollaborateError> {
match self
.persistence
.create_folder(user_id, folder_id, repeated_revision)
.await?
{
Some(folder_info) => {
let handler = self.create_folder_handler(folder_info).await?;
self.folder_handlers
.write()
.await
.insert(folder_id.to_owned(), handler.clone());
Ok(handler)
}
None => Err(CollaborateError::internal().context("Create folder info from revisions failed")),
}
}
}
type FolderRevisionSynchronizer = RevisionSynchronizer<PlainTextAttributes>;
struct OpenFolderHandler {
folder_id: String,
sender: mpsc::Sender<FolderCommand>,
}
impl OpenFolderHandler {
fn new(folder_info: FolderInfo, persistence: Arc<dyn FolderCloudPersistence>) -> CollaborateResult<Self> {
let (sender, receiver) = mpsc::channel(1000);
let folder_id = folder_info.folder_id.clone();
let delta = FolderDelta::from_bytes(&folder_info.text)?;
let sync_object = ServerFolder::from_delta(&folder_id, delta);
let synchronizer = Arc::new(FolderRevisionSynchronizer::new(
folder_info.rev_id,
sync_object,
persistence,
));
let queue = FolderCommandRunner::new(&folder_id, receiver, synchronizer);
tokio::task::spawn(queue.run());
Ok(Self { folder_id, sender })
}
#[tracing::instrument(
name = "server_folder_apply_revision",
level = "trace",
skip(self, user, repeated_revision),
err
)]
async fn apply_revisions(
&self,
user: Arc<dyn RevisionUser>,
repeated_revision: RepeatedRevisionPB,
) -> CollaborateResult<()> {
let (ret, rx) = oneshot::channel();
let msg = FolderCommand::ApplyRevisions {
user,
repeated_revision,
ret,
};
self.send(msg, rx).await?
}
async fn apply_ping(&self, rev_id: i64, user: Arc<dyn RevisionUser>) -> Result<(), CollaborateError> {
let (ret, rx) = oneshot::channel();
let msg = FolderCommand::Ping { user, rev_id, ret };
self.send(msg, rx).await?
}
async fn send<T>(&self, msg: FolderCommand, rx: oneshot::Receiver<T>) -> CollaborateResult<T> {
let _ = self
.sender
.send(msg)
.await
.map_err(|e| CollaborateError::internal().context(format!("Send folder command failed: {}", e)))?;
Ok(rx.await.map_err(internal_error)?)
}
}
impl std::ops::Drop for OpenFolderHandler {
fn drop(&mut self) {
tracing::trace!("{} OpenFolderHandler was dropped", self.folder_id);
}
}
enum FolderCommand {
ApplyRevisions {
user: Arc<dyn RevisionUser>,
repeated_revision: RepeatedRevisionPB,
ret: oneshot::Sender<CollaborateResult<()>>,
},
Ping {
user: Arc<dyn RevisionUser>,
rev_id: i64,
ret: oneshot::Sender<CollaborateResult<()>>,
},
}
struct FolderCommandRunner {
folder_id: String,
receiver: Option<mpsc::Receiver<FolderCommand>>,
synchronizer: Arc<FolderRevisionSynchronizer>,
}
impl FolderCommandRunner {
fn new(
folder_id: &str,
receiver: mpsc::Receiver<FolderCommand>,
synchronizer: Arc<FolderRevisionSynchronizer>,
) -> Self {
Self {
folder_id: folder_id.to_owned(),
receiver: Some(receiver),
synchronizer,
}
}
async fn run(mut self) {
let mut receiver = self
.receiver
.take()
.expect("FolderCommandRunner's receiver should only take one time");
let stream = stream! {
loop {
match receiver.recv().await {
Some(msg) => yield msg,
None => break,
}
}
};
stream.for_each(|msg| self.handle_message(msg)).await;
}
async fn handle_message(&self, msg: FolderCommand) {
match msg {
FolderCommand::ApplyRevisions {
user,
repeated_revision,
ret,
} => {
let result = self
.synchronizer
.sync_revisions(user, repeated_revision)
.await
.map_err(internal_error);
let _ = ret.send(result);
}
FolderCommand::Ping { user, rev_id, ret } => {
let result = self.synchronizer.pong(user, rev_id).await.map_err(internal_error);
let _ = ret.send(result);
}
}
}
}
impl std::ops::Drop for FolderCommandRunner {
fn drop(&mut self) {
tracing::trace!("{} FolderCommandRunner was dropped", self.folder_id);
}
}
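A small assumed sketch of dispatching an incoming websocket payload to this manager, assuming the generated ClientRevisionWSDataType is re-exported from crate::protobuf alongside ClientRevisionWSData; the function name is made up.

use std::sync::Arc;
use crate::errors::CollaborateError;
use crate::protobuf::{ClientRevisionWSData, ClientRevisionWSDataType};
use crate::synchronizer::RevisionUser;

async fn route_folder_ws_data(
    manager: &ServerFolderManager,
    user: Arc<dyn RevisionUser>,
    data: ClientRevisionWSData,
) -> Result<(), CollaborateError> {
    // Dispatch on the message type carried by the payload.
    match data.ty {
        ClientRevisionWSDataType::ClientPushRev => manager.handle_client_revisions(user, data).await,
        ClientRevisionWSDataType::ClientPing => manager.handle_client_ping(user, data).await,
    }
}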

View File

@ -0,0 +1,41 @@
use crate::{entities::folder_info::FolderDelta, errors::CollaborateError, synchronizer::RevisionSyncObject};
use lib_ot::core::{OperationTransformable, PlainTextAttributes, PlainTextDelta};
pub struct ServerFolder {
folder_id: String,
delta: FolderDelta,
}
impl ServerFolder {
pub fn from_delta(folder_id: &str, delta: FolderDelta) -> Self {
Self {
folder_id: folder_id.to_owned(),
delta,
}
}
}
impl RevisionSyncObject<PlainTextAttributes> for ServerFolder {
fn id(&self) -> &str {
&self.folder_id
}
fn compose(&mut self, other: &PlainTextDelta) -> Result<(), CollaborateError> {
let new_delta = self.delta.compose(other)?;
self.delta = new_delta;
Ok(())
}
fn transform(&self, other: &PlainTextDelta) -> Result<(PlainTextDelta, PlainTextDelta), CollaborateError> {
let value = self.delta.transform(other)?;
Ok(value)
}
fn to_json(&self) -> String {
self.delta.to_delta_str()
}
fn set_delta(&mut self, new_delta: PlainTextDelta) {
self.delta = new_delta;
}
}

View File

@ -0,0 +1,4 @@
mod folder_manager;
mod folder_pad;
pub use folder_manager::*;

View File

@ -0,0 +1,264 @@
use crate::{
entities::{
revision::RevisionRange,
ws_data::{ServerRevisionWSData, ServerRevisionWSDataBuilder},
},
errors::CollaborateError,
protobuf::{RepeatedRevision as RepeatedRevisionPB, Revision as RevisionPB},
util::*,
};
use lib_infra::future::BoxResultFuture;
use lib_ot::core::{Attributes, Delta};
use parking_lot::RwLock;
use serde::de::DeserializeOwned;
use std::{
cmp::Ordering,
fmt::Debug,
sync::{
atomic::{AtomicI64, Ordering::SeqCst},
Arc,
},
time::Duration,
};
pub trait RevisionUser: Send + Sync + Debug {
fn user_id(&self) -> String;
fn receive(&self, resp: RevisionSyncResponse);
}
pub trait RevisionSyncPersistence: Send + Sync + 'static {
fn read_revisions(
&self,
object_id: &str,
rev_ids: Option<Vec<i64>>,
) -> BoxResultFuture<Vec<RevisionPB>, CollaborateError>;
fn save_revisions(&self, repeated_revision: RepeatedRevisionPB) -> BoxResultFuture<(), CollaborateError>;
fn reset_object(
&self,
object_id: &str,
repeated_revision: RepeatedRevisionPB,
) -> BoxResultFuture<(), CollaborateError>;
}
pub trait RevisionSyncObject<T: Attributes>: Send + Sync + 'static {
fn id(&self) -> &str;
fn compose(&mut self, other: &Delta<T>) -> Result<(), CollaborateError>;
fn transform(&self, other: &Delta<T>) -> Result<(Delta<T>, Delta<T>), CollaborateError>;
fn to_json(&self) -> String;
fn set_delta(&mut self, new_delta: Delta<T>);
}
pub enum RevisionSyncResponse {
Pull(ServerRevisionWSData),
Push(ServerRevisionWSData),
Ack(ServerRevisionWSData),
}
pub struct RevisionSynchronizer<T: Attributes> {
object_id: String,
rev_id: AtomicI64,
object: Arc<RwLock<dyn RevisionSyncObject<T>>>,
persistence: Arc<dyn RevisionSyncPersistence>,
}
impl<T> RevisionSynchronizer<T>
where
T: Attributes + DeserializeOwned + serde::Serialize + 'static,
{
pub fn new<S, P>(rev_id: i64, sync_object: S, persistence: P) -> RevisionSynchronizer<T>
where
S: RevisionSyncObject<T>,
P: RevisionSyncPersistence,
{
let object = Arc::new(RwLock::new(sync_object));
let persistence = Arc::new(persistence);
let object_id = object.read().id().to_owned();
RevisionSynchronizer {
object_id,
rev_id: AtomicI64::new(rev_id),
object,
persistence,
}
}
#[tracing::instrument(level = "trace", skip(self, user, repeated_revision), err)]
pub async fn sync_revisions(
&self,
user: Arc<dyn RevisionUser>,
repeated_revision: RepeatedRevisionPB,
) -> Result<(), CollaborateError> {
let object_id = self.object_id.clone();
if repeated_revision.get_items().is_empty() {
// Return all of the revisions to the client
let revisions = self.persistence.read_revisions(&object_id, None).await?;
let repeated_revision = repeated_revision_from_revision_pbs(revisions)?;
let data = ServerRevisionWSDataBuilder::build_push_message(&object_id, repeated_revision);
user.receive(RevisionSyncResponse::Push(data));
return Ok(());
}
let server_base_rev_id = self.rev_id.load(SeqCst);
let first_revision = repeated_revision.get_items().first().unwrap().clone();
if self.is_applied_before(&first_revision, &self.persistence).await {
// The server has already received this revision, so ignore it and the ones that follow
return Ok(());
}
match server_base_rev_id.cmp(&first_revision.rev_id) {
Ordering::Less => {
let server_rev_id = next(server_base_rev_id);
if server_base_rev_id == first_revision.base_rev_id || server_rev_id == first_revision.rev_id {
// The revision is in the correct order; just compose it.
for revision in repeated_revision.get_items() {
let _ = self.compose_revision(revision)?;
}
let _ = self.persistence.save_revisions(repeated_revision).await?;
} else {
// The server delta is outdated; pull the missing revisions from the client.
let range = RevisionRange {
start: server_rev_id,
end: first_revision.rev_id,
};
let msg = ServerRevisionWSDataBuilder::build_pull_message(&self.object_id, range);
user.receive(RevisionSyncResponse::Pull(msg));
}
}
Ordering::Equal => {
// Do nothing
tracing::trace!("Applied {} revision rev_id is the same as cur_rev_id", self.object_id);
}
Ordering::Greater => {
// The client delta is outdated. Transform the client revision delta and then
// send the prime delta back to the client, which should compose this prime
// delta.
let from_rev_id = first_revision.rev_id;
let to_rev_id = server_base_rev_id;
let _ = self.push_revisions_to_user(user, from_rev_id, to_rev_id).await;
}
}
Ok(())
}
#[tracing::instrument(level = "trace", skip(self, user), fields(server_rev_id), err)]
pub async fn pong(&self, user: Arc<dyn RevisionUser>, client_rev_id: i64) -> Result<(), CollaborateError> {
let object_id = self.object_id.clone();
let server_rev_id = self.rev_id();
tracing::Span::current().record("server_rev_id", &server_rev_id);
match server_rev_id.cmp(&client_rev_id) {
Ordering::Less => {
tracing::trace!("Client should not send ping and the server should pull the revisions from the client")
}
Ordering::Equal => tracing::trace!("{} is up to date.", object_id),
Ordering::Greater => {
// The client delta is outdated. Transform the client revision delta and then
// send the prime delta back to the client, which should compose this prime
// delta.
let from_rev_id = client_rev_id;
let to_rev_id = server_rev_id;
tracing::trace!("Push revisions to user");
let _ = self.push_revisions_to_user(user, from_rev_id, to_rev_id).await;
}
}
Ok(())
}
#[tracing::instrument(level = "debug", skip(self, repeated_revision), fields(object_id), err)]
pub async fn reset(&self, repeated_revision: RepeatedRevisionPB) -> Result<(), CollaborateError> {
let object_id = self.object_id.clone();
tracing::Span::current().record("object_id", &object_id.as_str());
let revisions: Vec<RevisionPB> = repeated_revision.get_items().to_vec();
let (_, rev_id) = pair_rev_id_from_revision_pbs(&revisions);
let delta = make_delta_from_revision_pb(revisions)?;
let _ = self.persistence.reset_object(&object_id, repeated_revision).await?;
self.object.write().set_delta(delta);
let _ = self.rev_id.fetch_update(SeqCst, SeqCst, |_e| Some(rev_id));
Ok(())
}
pub fn object_json(&self) -> String {
self.object.read().to_json()
}
fn compose_revision(&self, revision: &RevisionPB) -> Result<(), CollaborateError> {
let delta = Delta::<T>::from_bytes(&revision.delta_data)?;
let _ = self.compose_delta(delta)?;
let _ = self.rev_id.fetch_update(SeqCst, SeqCst, |_e| Some(revision.rev_id));
Ok(())
}
#[tracing::instrument(level = "debug", skip(self, revision))]
fn transform_revision(&self, revision: &RevisionPB) -> Result<(Delta<T>, Delta<T>), CollaborateError> {
let cli_delta = Delta::<T>::from_bytes(&revision.delta_data)?;
let result = self.object.read().transform(&cli_delta)?;
Ok(result)
}
fn compose_delta(&self, delta: Delta<T>) -> Result<(), CollaborateError> {
if delta.is_empty() {
log::warn!("Composed delta is empty");
}
match self.object.try_write_for(Duration::from_millis(300)) {
None => log::error!("Failed to acquire the write lock on the object"),
Some(mut write_guard) => {
let _ = write_guard.compose(&delta)?;
}
}
Ok(())
}
pub(crate) fn rev_id(&self) -> i64 {
self.rev_id.load(SeqCst)
}
async fn is_applied_before(
&self,
new_revision: &RevisionPB,
persistence: &Arc<dyn RevisionSyncPersistence>,
) -> bool {
let rev_ids = Some(vec![new_revision.rev_id]);
if let Ok(revisions) = persistence.read_revisions(&self.object_id, rev_ids).await {
if let Some(revision) = revisions.first() {
if revision.md5 == new_revision.md5 {
return true;
}
}
};
false
}
async fn push_revisions_to_user(&self, user: Arc<dyn RevisionUser>, from: i64, to: i64) {
let rev_ids: Vec<i64> = (from..=to).collect();
tracing::debug!("Push revision: {} -> {} to client", from, to);
match self
.persistence
.read_revisions(&self.object_id, Some(rev_ids.clone()))
.await
{
Ok(revisions) => {
if !rev_ids.is_empty() && revisions.is_empty() {
tracing::trace!("{}: can not read the revisions in range {:?}", self.object_id, rev_ids);
// assert_eq!(revisions.is_empty(), rev_ids.is_empty(),);
}
match repeated_revision_from_revision_pbs(revisions) {
Ok(repeated_revision) => {
let data = ServerRevisionWSDataBuilder::build_push_message(&self.object_id, repeated_revision);
user.receive(RevisionSyncResponse::Push(data));
}
Err(e) => tracing::error!("{}", e),
}
}
Err(e) => {
tracing::error!("{}", e);
}
};
}
}
#[inline]
fn next(rev_id: i64) -> i64 {
rev_id + 1
}
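
The heart of sync_revisions and pong is the comparison between the server's rev_id and the client's: behind, equal, and ahead each map to a different response. A simplified, self-contained sketch of just that decision follows; SyncAction and decide are illustrative names, and the real code additionally composes deltas, persists revisions, and replies over the websocket.

use std::cmp::Ordering;

#[derive(Debug, PartialEq)]
enum SyncAction {
    /// The incoming revision follows the server state: compose and save it.
    Compose,
    /// The server is missing revisions: ask the client to send the range.
    PullFromClient { start: i64, end: i64 },
    /// Server and client agree: nothing to do.
    UpToDate,
    /// The client is behind: push the server's newer revisions back to it.
    PushToClient { from: i64, to: i64 },
}

fn decide(server_rev_id: i64, client_base_rev_id: i64, client_rev_id: i64) -> SyncAction {
    match server_rev_id.cmp(&client_rev_id) {
        Ordering::Less => {
            let next_server_rev_id = server_rev_id + 1;
            if server_rev_id == client_base_rev_id || next_server_rev_id == client_rev_id {
                SyncAction::Compose
            } else {
                SyncAction::PullFromClient {
                    start: next_server_rev_id,
                    end: client_rev_id,
                }
            }
        }
        Ordering::Equal => SyncAction::UpToDate,
        Ordering::Greater => SyncAction::PushToClient {
            from: client_rev_id,
            to: server_rev_id,
        },
    }
}

fn main() {
    // Server is at rev 3; the client sends a revision built on rev 3 with id 4.
    assert_eq!(decide(3, 3, 4), SyncAction::Compose);
    // Server is at rev 3; the client sends rev 7, so the server must pull 4..=7.
    assert_eq!(decide(3, 6, 7), SyncAction::PullFromClient { start: 4, end: 7 });
    // The client pings with an old rev id; the server pushes 2..=5 back.
    assert_eq!(decide(5, 1, 2), SyncAction::PushToClient { from: 2, to: 5 });
    println!("ordering decisions behave as expected");
}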

View File

@ -0,0 +1,283 @@
use crate::{
entities::{
folder_info::{FolderDelta, FolderInfo},
revision::{RepeatedRevision, Revision},
text_block_info::TextBlockInfo,
},
errors::{CollaborateError, CollaborateResult},
protobuf::{
FolderInfo as FolderInfoPB, RepeatedRevision as RepeatedRevisionPB, Revision as RevisionPB,
TextBlockInfo as TextBlockInfoPB,
},
};
use dissimilar::Chunk;
use lib_ot::core::{DeltaBuilder, FlowyStr};
use lib_ot::{
core::{Attributes, Delta, OperationTransformable, NEW_LINE, WHITESPACE},
rich_text::RichTextDelta,
};
use serde::de::DeserializeOwned;
use std::{
convert::TryInto,
sync::atomic::{AtomicI64, Ordering::SeqCst},
};
#[inline]
pub fn find_newline(s: &str) -> Option<usize> {
s.find(NEW_LINE)
}
#[inline]
pub fn is_newline(s: &str) -> bool {
s == NEW_LINE
}
#[inline]
pub fn is_whitespace(s: &str) -> bool {
s == WHITESPACE
}
#[inline]
pub fn contain_newline(s: &str) -> bool {
s.contains(NEW_LINE)
}
#[inline]
pub fn md5<T: AsRef<[u8]>>(data: T) -> String {
format!("{:x}", md5::compute(data))
}
#[derive(Debug)]
pub struct RevIdCounter(pub AtomicI64);
impl RevIdCounter {
pub fn new(n: i64) -> Self {
Self(AtomicI64::new(n))
}
pub fn next(&self) -> i64 {
let _ = self.0.fetch_add(1, SeqCst);
self.value()
}
pub fn value(&self) -> i64 {
self.0.load(SeqCst)
}
pub fn set(&self, n: i64) {
let _ = self.0.fetch_update(SeqCst, SeqCst, |_| Some(n));
}
}
#[tracing::instrument(level = "trace", skip(revisions), err)]
pub fn make_delta_from_revisions<T>(revisions: Vec<Revision>) -> CollaborateResult<Delta<T>>
where
T: Attributes + DeserializeOwned,
{
let mut delta = Delta::<T>::new();
for revision in revisions {
if revision.delta_data.is_empty() {
tracing::warn!("revision delta_data is empty");
}
let revision_delta = Delta::<T>::from_bytes(revision.delta_data).map_err(|e| {
let err_msg = format!("Deserialize remote revision failed: {:?}", e);
CollaborateError::internal().context(err_msg)
})?;
delta = delta.compose(&revision_delta)?;
}
Ok(delta)
}
pub fn make_delta_from_revision_pb<T>(revisions: Vec<RevisionPB>) -> CollaborateResult<Delta<T>>
where
T: Attributes + DeserializeOwned,
{
let mut new_delta = Delta::<T>::new();
for revision in revisions {
let delta = Delta::<T>::from_bytes(revision.delta_data).map_err(|e| {
let err_msg = format!("Deserialize remote revision failed: {:?}", e);
CollaborateError::internal().context(err_msg)
})?;
new_delta = new_delta.compose(&delta)?;
}
Ok(new_delta)
}
pub fn repeated_revision_from_revision_pbs(revisions: Vec<RevisionPB>) -> CollaborateResult<RepeatedRevision> {
let repeated_revision_pb = repeated_revision_pb_from_revisions(revisions);
repeated_revision_from_repeated_revision_pb(repeated_revision_pb)
}
pub fn repeated_revision_pb_from_revisions(revisions: Vec<RevisionPB>) -> RepeatedRevisionPB {
let mut repeated_revision_pb = RepeatedRevisionPB::new();
repeated_revision_pb.set_items(revisions.into());
repeated_revision_pb
}
pub fn repeated_revision_from_repeated_revision_pb(
repeated_revision: RepeatedRevisionPB,
) -> CollaborateResult<RepeatedRevision> {
repeated_revision
.try_into()
.map_err(|e| CollaborateError::internal().context(format!("Cast repeated revision failed: {:?}", e)))
}
pub fn pair_rev_id_from_revision_pbs(revisions: &[RevisionPB]) -> (i64, i64) {
let mut rev_id = 0;
revisions.iter().for_each(|revision| {
if rev_id < revision.rev_id {
rev_id = revision.rev_id;
}
});
if rev_id > 0 {
(rev_id - 1, rev_id)
} else {
(0, rev_id)
}
}
pub fn pair_rev_id_from_revisions(revisions: &[Revision]) -> (i64, i64) {
let mut rev_id = 0;
revisions.iter().for_each(|revision| {
if rev_id < revision.rev_id {
rev_id = revision.rev_id;
}
});
if rev_id > 0 {
(rev_id - 1, rev_id)
} else {
(0, rev_id)
}
}
#[inline]
pub fn make_folder_from_revisions_pb(
folder_id: &str,
revisions: RepeatedRevisionPB,
) -> Result<Option<FolderInfo>, CollaborateError> {
match make_folder_pb_from_revisions_pb(folder_id, revisions)? {
None => Ok(None),
Some(pb) => {
let folder_info: FolderInfo = pb.try_into().map_err(|e| CollaborateError::internal().context(e))?;
Ok(Some(folder_info))
}
}
}
#[inline]
pub fn make_folder_pb_from_revisions_pb(
folder_id: &str,
mut revisions: RepeatedRevisionPB,
) -> Result<Option<FolderInfoPB>, CollaborateError> {
let revisions = revisions.take_items();
if revisions.is_empty() {
return Ok(None);
}
let mut folder_delta = FolderDelta::new();
let mut base_rev_id = 0;
let mut rev_id = 0;
for revision in revisions {
base_rev_id = revision.base_rev_id;
rev_id = revision.rev_id;
if revision.delta_data.is_empty() {
tracing::warn!("revision delta_data is empty");
}
let delta = FolderDelta::from_bytes(revision.delta_data)?;
folder_delta = folder_delta.compose(&delta)?;
}
let text = folder_delta.to_delta_str();
let mut folder_info = FolderInfoPB::new();
folder_info.set_folder_id(folder_id.to_owned());
folder_info.set_text(text);
folder_info.set_base_rev_id(base_rev_id);
folder_info.set_rev_id(rev_id);
Ok(Some(folder_info))
}
#[inline]
pub fn make_document_info_from_revisions_pb(
doc_id: &str,
revisions: RepeatedRevisionPB,
) -> Result<Option<TextBlockInfo>, CollaborateError> {
match make_document_info_pb_from_revisions_pb(doc_id, revisions)? {
None => Ok(None),
Some(pb) => {
let document_info: TextBlockInfo = pb.try_into().map_err(|e| {
CollaborateError::internal().context(format!("Deserialize document info from pb failed: {}", e))
})?;
Ok(Some(document_info))
}
}
}
#[inline]
pub fn make_document_info_pb_from_revisions_pb(
doc_id: &str,
mut revisions: RepeatedRevisionPB,
) -> Result<Option<TextBlockInfoPB>, CollaborateError> {
let revisions = revisions.take_items();
if revisions.is_empty() {
return Ok(None);
}
let mut document_delta = RichTextDelta::new();
let mut base_rev_id = 0;
let mut rev_id = 0;
for revision in revisions {
base_rev_id = revision.base_rev_id;
rev_id = revision.rev_id;
if revision.delta_data.is_empty() {
tracing::warn!("revision delta_data is empty");
}
let delta = RichTextDelta::from_bytes(revision.delta_data)?;
document_delta = document_delta.compose(&delta)?;
}
let text = document_delta.to_delta_str();
let mut block_info = TextBlockInfoPB::new();
block_info.set_block_id(doc_id.to_owned());
block_info.set_text(text);
block_info.set_base_rev_id(base_rev_id);
block_info.set_rev_id(rev_id);
Ok(Some(block_info))
}
#[inline]
pub fn rev_id_from_str(s: &str) -> Result<i64, CollaborateError> {
let rev_id = s
.parse::<i64>()
.map_err(|e| CollaborateError::internal().context(format!("Parse rev_id from {} failed. {}", s, e)))?;
Ok(rev_id)
}
pub fn cal_diff<T: Attributes>(old: String, new: String) -> Option<Delta<T>> {
let chunks = dissimilar::diff(&old, &new);
let mut delta_builder = DeltaBuilder::<T>::new();
for chunk in &chunks {
match chunk {
Chunk::Equal(s) => {
delta_builder = delta_builder.retain(FlowyStr::from(*s).utf16_size());
}
Chunk::Delete(s) => {
delta_builder = delta_builder.delete(FlowyStr::from(*s).utf16_size());
}
Chunk::Insert(s) => {
delta_builder = delta_builder.insert(*s);
}
}
}
let delta = delta_builder.build();
if delta.is_empty() {
None
} else {
Some(delta)
}
}
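
cal_diff leans on the dissimilar crate: each Equal or Delete chunk becomes a retain or delete of the chunk's UTF-16 length, and each Insert chunk becomes an insert of its text. A minimal sketch of that mapping follows, using a plain Op enum instead of lib-ot's DeltaBuilder; Op and diff_to_ops are illustrative names, and the UTF-16 counting is assumed to match what FlowyStr::utf16_size computes.

use dissimilar::Chunk;

#[derive(Debug)]
enum Op {
    Retain(usize),
    Delete(usize),
    Insert(String),
}

fn diff_to_ops(old: &str, new: &str) -> Vec<Op> {
    dissimilar::diff(old, new)
        .into_iter()
        .map(|chunk| match chunk {
            // Lengths are counted in UTF-16 code units, mirroring the real
            // implementation's use of utf16_size for retain/delete.
            Chunk::Equal(s) => Op::Retain(s.encode_utf16().count()),
            Chunk::Delete(s) => Op::Delete(s.encode_utf16().count()),
            Chunk::Insert(s) => Op::Insert(s.to_owned()),
        })
        .collect()
}

fn main() {
    // Typical use: diff two serialized documents and turn the chunks into ops.
    for op in diff_to_ops("hello world", "hello flowy world") {
        println!("{:?}", op);
    }
}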