Mirror of https://github.com/AppFlowy-IO/AppFlowy.git, synced 2024-08-30 18:12:39 +00:00
fix: compose folder error
commit 8a0308703e
parent b16b61f46c
@@ -226,7 +226,7 @@ impl RevisionObjectBuilder for BlockInfoBuilder {

         Result::<BlockInfo, FlowyError>::Ok(BlockInfo {
             doc_id: object_id.to_owned(),
-            text: delta.to_json(),
+            text: delta.to_delta_json(),
             rev_id,
             base_rev_id,
         })
@@ -77,7 +77,7 @@ impl EditorTest {
         let delta = self.editor.doc_delta().await.unwrap();
         if expected_delta != delta {
             eprintln!("✅ expect: {}", expected,);
-            eprintln!("❌ receive: {}", delta.to_json());
+            eprintln!("❌ receive: {}", delta.to_delta_json());
         }
         assert_eq!(expected_delta, delta);
     }
@@ -774,7 +774,7 @@ fn delta_compose() {
         delta = delta.compose(&d).unwrap();
     }
     assert_eq!(
-        delta.to_json(),
+        delta.to_delta_json(),
         r#"[{"insert":"a"},{"insert":"\n","attributes":{"list":"unchecked"}},{"insert":"\n"}]"#
     );
@@ -108,20 +108,20 @@ impl TestBuilder {
             TestOp::Insert(delta_i, s, index) => {
                 let document = &mut self.documents[*delta_i];
                 let delta = document.insert(*index, s).unwrap();
-                tracing::debug!("Insert delta: {}", delta.to_json());
+                tracing::debug!("Insert delta: {}", delta.to_delta_json());

                 self.deltas.insert(*delta_i, Some(delta));
             }
             TestOp::Delete(delta_i, iv) => {
                 let document = &mut self.documents[*delta_i];
                 let delta = document.replace(*iv, "").unwrap();
-                tracing::trace!("Delete delta: {}", delta.to_json());
+                tracing::trace!("Delete delta: {}", delta.to_delta_json());
                 self.deltas.insert(*delta_i, Some(delta));
             }
             TestOp::Replace(delta_i, iv, s) => {
                 let document = &mut self.documents[*delta_i];
                 let delta = document.replace(*iv, s).unwrap();
-                tracing::trace!("Replace delta: {}", delta.to_json());
+                tracing::trace!("Replace delta: {}", delta.to_delta_json());
                 self.deltas.insert(*delta_i, Some(delta));
             }
             TestOp::InsertBold(delta_i, s, iv) => {
@@ -133,7 +133,7 @@ impl TestBuilder {
                 let document = &mut self.documents[*delta_i];
                 let attribute = RichTextAttribute::Bold(*enable);
                 let delta = document.format(*iv, attribute).unwrap();
-                tracing::trace!("Bold delta: {}", delta.to_json());
+                tracing::trace!("Bold delta: {}", delta.to_delta_json());
                 self.deltas.insert(*delta_i, Some(delta));
             }
             TestOp::Italic(delta_i, iv, enable) => {
@@ -143,28 +143,28 @@ impl TestBuilder {
                     false => RichTextAttribute::Italic(false),
                 };
                 let delta = document.format(*iv, attribute).unwrap();
-                tracing::trace!("Italic delta: {}", delta.to_json());
+                tracing::trace!("Italic delta: {}", delta.to_delta_json());
                 self.deltas.insert(*delta_i, Some(delta));
             }
             TestOp::Header(delta_i, iv, level) => {
                 let document = &mut self.documents[*delta_i];
                 let attribute = RichTextAttribute::Header(*level);
                 let delta = document.format(*iv, attribute).unwrap();
-                tracing::trace!("Header delta: {}", delta.to_json());
+                tracing::trace!("Header delta: {}", delta.to_delta_json());
                 self.deltas.insert(*delta_i, Some(delta));
             }
             TestOp::Link(delta_i, iv, link) => {
                 let document = &mut self.documents[*delta_i];
                 let attribute = RichTextAttribute::Link(link.to_owned());
                 let delta = document.format(*iv, attribute).unwrap();
-                tracing::trace!("Link delta: {}", delta.to_json());
+                tracing::trace!("Link delta: {}", delta.to_delta_json());
                 self.deltas.insert(*delta_i, Some(delta));
             }
             TestOp::Bullet(delta_i, iv, enable) => {
                 let document = &mut self.documents[*delta_i];
                 let attribute = RichTextAttribute::Bullet(*enable);
                 let delta = document.format(*iv, attribute).unwrap();
-                tracing::debug!("Bullet delta: {}", delta.to_json());
+                tracing::debug!("Bullet delta: {}", delta.to_delta_json());

                 self.deltas.insert(*delta_i, Some(delta));
             }
@@ -194,15 +194,15 @@ impl TestBuilder {
                 let delta_a = &self.documents[*delta_a_i].delta();
                 let delta_b = &self.documents[*delta_b_i].delta();
                 tracing::debug!("Invert: ");
-                tracing::debug!("a: {}", delta_a.to_json());
-                tracing::debug!("b: {}", delta_b.to_json());
+                tracing::debug!("a: {}", delta_a.to_delta_json());
+                tracing::debug!("b: {}", delta_b.to_delta_json());

                 let (_, b_prime) = delta_a.transform(delta_b).unwrap();
                 let undo = b_prime.invert(delta_a);

                 let new_delta = delta_a.compose(&b_prime).unwrap();
-                tracing::debug!("new delta: {}", new_delta.to_json());
-                tracing::debug!("undo delta: {}", undo.to_json());
+                tracing::debug!("new delta: {}", new_delta.to_delta_json());
+                tracing::debug!("undo delta: {}", undo.to_delta_json());

                 let new_delta_after_undo = new_delta.compose(&undo).unwrap();
@@ -238,7 +238,7 @@ impl TestBuilder {
             }

             TestOp::AssertPrimeJson(doc_i, expected) => {
-                let prime_json = self.primes[*doc_i].as_ref().unwrap().to_json();
+                let prime_json = self.primes[*doc_i].as_ref().unwrap().to_delta_json();
                 let expected_prime: RichTextDelta = serde_json::from_str(expected).unwrap();
                 let target_prime: RichTextDelta = serde_json::from_str(&prime_json).unwrap();
@@ -92,7 +92,7 @@ fn delta_deserialize_null_test() {
     attribute.value = RichTextAttributeValue(None);
     let delta2 = DeltaBuilder::new().retain_with_attributes(7, attribute.into()).build();

-    assert_eq!(delta2.to_json(), r#"[{"retain":7,"attributes":{"bold":""}}]"#);
+    assert_eq!(delta2.to_delta_json(), r#"[{"retain":7,"attributes":{"bold":""}}]"#);
     assert_eq!(delta1, delta2);
 }
@@ -197,9 +197,9 @@ impl DefaultFolderBuilder {
         for app in workspace.apps.iter() {
             for (index, view) in app.belongings.iter().enumerate() {
                 let view_data = if index == 0 {
-                    initial_read_me().to_json()
+                    initial_read_me().to_delta_json()
                 } else {
-                    initial_delta().to_json()
+                    initial_delta().to_delta_json()
                 };
                 view_controller.set_latest_view(view);
                 let delta_data = Bytes::from(view_data);
@@ -1,3 +1,4 @@
+use crate::controller::FolderId;
 use crate::{
     event_map::WorkspaceDatabase,
     services::persistence::{AppTableSql, TrashTableSql, ViewTableSql, WorkspaceTableSql},
@@ -10,9 +11,11 @@ use flowy_folder_data_model::entities::{
     view::{RepeatedView, View},
     workspace::Workspace,
 };
+use flowy_sync::{RevisionLoader, RevisionPersistence};
 use std::sync::Arc;

-pub(crate) const V1_MIGRATION: &str = "FOLDER_V1_MIGRATION";
+const V1_MIGRATION: &str = "FOLDER_V1_MIGRATION";
+const V2_MIGRATION: &str = "FOLDER_V2_MIGRATION";

 pub(crate) struct FolderMigration {
     user_id: String,
@@ -32,7 +35,7 @@ impl FolderMigration {
         if KV::get_bool(&key) {
             return Ok(None);
         }
         tracing::trace!("Run folder version 1 migrations");

         let pool = self.database.db_pool()?;
         let conn = &*pool.get()?;
         let workspaces = conn.immediate_transaction::<_, FlowyError, _>(|| {
@@ -62,6 +65,7 @@ impl FolderMigration {
         })?;

         if workspaces.is_empty() {
+            tracing::trace!("Run folder v1 migration, but workspace is empty");
             KV::set_bool(&key, true);
             return Ok(None);
         }
@@ -73,6 +77,35 @@ impl FolderMigration {

         let folder = FolderPad::new(workspaces, trash)?;
         KV::set_bool(&key, true);
         tracing::trace!("Run folder v1 migration");
         Ok(Some(folder))
     }
+
+    pub async fn run_v2_migration(&self, user_id: &str, folder_id: &FolderId) -> FlowyResult<Option<FolderPad>> {
+        let key = md5(format!("{}{}", self.user_id, V2_MIGRATION));
+        if KV::get_bool(&key) {
+            return Ok(None);
+        }
+        let pool = self.database.db_pool()?;
+        let rev_persistence = Arc::new(RevisionPersistence::new(user_id, folder_id.as_ref(), pool.clone()));
+        let (revisions, _) = RevisionLoader {
+            object_id: folder_id.as_ref().to_owned(),
+            user_id: self.user_id.clone(),
+            cloud: None,
+            rev_persistence,
+        }
+        .load()
+        .await?;
+
+        if revisions.is_empty() {
+            tracing::trace!("Run folder v2 migration, but revision is empty");
+            KV::set_bool(&key, true);
+            return Ok(None);
+        }
+
+        let pad = FolderPad::from_revisions(revisions)?;
+        KV::set_bool(&key, true);
+        tracing::trace!("Run folder v2 migration");
+        Ok(Some(pad))
+    }
 }
@@ -2,6 +2,7 @@ mod migration;
 pub mod version_1;
 mod version_2;

+use flowy_collaboration::client_folder::initial_folder_delta;
 use flowy_collaboration::{
     client_folder::FolderPad,
     entities::revision::{Revision, RevisionState},
|
||||
pub async fn initialize(&self, user_id: &str, folder_id: &FolderId) -> FlowyResult<()> {
|
||||
let migrations = FolderMigration::new(user_id, self.database.clone());
|
||||
if let Some(migrated_folder) = migrations.run_v1_migration()? {
|
||||
tracing::trace!("Save migration folder");
|
||||
self.save_folder(user_id, folder_id, migrated_folder).await?;
|
||||
}
|
||||
|
||||
if let Some(migrated_folder) = migrations.run_v2_migration(user_id, folder_id).await? {
|
||||
self.save_folder(user_id, folder_id, migrated_folder).await?;
|
||||
}
|
||||
|
||||
@@ -114,7 +118,7 @@ impl FolderPersistence {

     pub async fn save_folder(&self, user_id: &str, folder_id: &FolderId, folder: FolderPad) -> FlowyResult<()> {
         let pool = self.database.db_pool()?;
-        let delta_data = folder.delta().to_bytes();
+        let delta_data = initial_folder_delta(&folder)?.to_bytes();
         let md5 = folder.md5();
         let revision = Revision::new(folder_id.as_ref(), 0, 0, delta_data, user_id, md5);
         let record = RevisionRecord {
@@ -123,8 +127,7 @@
             write_to_disk: true,
         };

-        let conn = pool.get()?;
         let disk_cache = mk_revision_disk_cache(user_id, pool);
-        disk_cache.create_revision_records(vec![record], &conn)
+        disk_cache.delete_and_insert_records(folder_id.as_ref(), None, vec![record])
     }
 }
@@ -109,13 +109,13 @@ pub async fn delete_app(sdk: &FlowySDKTest, app_id: &str) {
         .await;
 }

-pub async fn create_view(sdk: &FlowySDKTest, app_id: &str, name: &str, desc: &str, view_type: ViewDataType) -> View {
+pub async fn create_view(sdk: &FlowySDKTest, app_id: &str, name: &str, desc: &str, data_type: ViewDataType) -> View {
     let request = CreateViewPayload {
         belong_to_id: app_id.to_string(),
         name: name.to_string(),
         desc: desc.to_string(),
         thumbnail: None,
-        data_type: view_type,
+        data_type,
         ext_data: "".to_string(),
         plugin_type: 0,
     };
@@ -67,7 +67,7 @@ fn crate_log_filter(level: String) -> String {
     filters.push(format!("flowy_folder={}", level));
     filters.push(format!("flowy_user={}", level));
     filters.push(format!("flowy_document={}", level));
-    // filters.push(format!("flowy_collaboration={}", level));
+    filters.push(format!("flowy_collaboration={}", "debug"));
     filters.push(format!("dart_notify={}", level));
     filters.push(format!("lib_ot={}", level));
     filters.push(format!("lib_ws={}", level));
@@ -76,7 +76,7 @@ fn crate_log_filter(level: String) -> String {
     filters.push(format!("dart_ffi={}", "info"));
     filters.push(format!("flowy_database={}", "info"));
     filters.push(format!("flowy_net={}", "info"));
-    filters.push(format!("flowy_sync={}", "info"));
+    filters.push(format!("flowy_sync={}", "trace"));
     filters.join(",")
 }
@@ -55,8 +55,8 @@ impl RevisionManager {
         let (revisions, rev_id) = RevisionLoader {
             object_id: self.object_id.clone(),
             user_id: self.user_id.clone(),
-            cloud,
-            rev_cache: self.rev_persistence.clone(),
+            cloud: Some(cloud),
+            rev_persistence: self.rev_persistence.clone(),
         }
         .load()
         .await?;
@@ -155,23 +155,28 @@ impl RevisionManager {
     }
 }

-struct RevisionLoader {
-    object_id: String,
-    user_id: String,
-    cloud: Arc<dyn RevisionCloudService>,
-    rev_cache: Arc<RevisionPersistence>,
+pub struct RevisionLoader {
+    pub object_id: String,
+    pub user_id: String,
+    pub cloud: Option<Arc<dyn RevisionCloudService>>,
+    pub rev_persistence: Arc<RevisionPersistence>,
 }

 impl RevisionLoader {
-    async fn load(&self) -> Result<(Vec<Revision>, i64), FlowyError> {
-        let records = self.rev_cache.batch_get(&self.object_id)?;
+    pub async fn load(&self) -> Result<(Vec<Revision>, i64), FlowyError> {
+        let records = self.rev_persistence.batch_get(&self.object_id)?;
         let revisions: Vec<Revision>;
         let mut rev_id = 0;
-        if records.is_empty() {
-            let remote_revisions = self.cloud.fetch_object(&self.user_id, &self.object_id).await?;
+        if records.is_empty() && self.cloud.is_some() {
+            let remote_revisions = self
+                .cloud
+                .as_ref()
+                .unwrap()
+                .fetch_object(&self.user_id, &self.object_id)
+                .await?;
             for revision in &remote_revisions {
                 rev_id = revision.rev_id;
-                let _ = self.rev_cache.add_ack_revision(revision).await?;
+                let _ = self.rev_persistence.add_ack_revision(revision).await?;
             }
             revisions = remote_revisions;
         } else {
@@ -179,7 +184,7 @@ impl RevisionLoader {
                 rev_id = record.revision.rev_id;
                 if record.state == RevisionState::Sync {
                     // Sync the records if their state is RevisionState::Sync.
-                    let _ = self.rev_cache.sync_revision(&record.revision).await?;
+                    let _ = self.rev_persistence.sync_revision(&record.revision).await?;
                 }
             }
             revisions = records.into_iter().map(|record| record.revision).collect::<_>();
@@ -7,7 +7,7 @@ pub fn initial_delta() -> RichTextDelta {

 #[inline]
 pub fn initial_delta_string() -> String {
-    initial_delta().to_json()
+    initial_delta().to_delta_json()
 }

 #[inline]
@@ -22,6 +22,6 @@ mod tests {

     #[test]
     fn load_read_me() {
-        println!("{}", initial_read_me().to_json());
+        println!("{}", initial_read_me().to_delta_json());
     }
 }
@@ -59,7 +59,7 @@ impl ClientDocument {
     }

     pub fn to_json(&self) -> String {
-        self.delta.to_json()
+        self.delta.to_delta_json()
     }

     pub fn to_bytes(&self) -> Vec<u8> {
|
||||
}
|
||||
|
||||
pub fn set_delta(&mut self, data: RichTextDelta) {
|
||||
tracing::trace!("document: {}", data.to_json());
|
||||
tracing::trace!("document: {}", data.to_delta_json());
|
||||
self.delta = data;
|
||||
|
||||
match &self.notify {
|
||||
@ -96,7 +96,7 @@ impl ClientDocument {
|
||||
}
|
||||
|
||||
pub fn compose_delta(&mut self, delta: RichTextDelta) -> Result<(), CollaborateError> {
|
||||
tracing::trace!("{} compose {}", &self.delta.to_json(), delta.to_json());
|
||||
tracing::trace!("{} compose {}", &self.delta.to_delta_json(), delta.to_delta_json());
|
||||
let composed_delta = self.delta.compose(&delta)?;
|
||||
let mut undo_delta = delta.invert(&self.delta);
|
||||
|
||||
|
@@ -38,7 +38,9 @@ impl FolderPadBuilder {
         if delta.is_empty() {
             delta = default_folder_delta();
         }
-        let folder_json = delta.apply("").unwrap();
+
+        // TODO: Reconvert from history if delta.to_str() failed.
+        let folder_json = delta.to_str()?;
         let mut folder: FolderPad = serde_json::from_str(&folder_json).map_err(|e| {
             CollaborateError::internal().context(format!("Deserialize json to root folder failed: {}", e))
         })?;
@@ -26,6 +26,12 @@ pub fn default_folder_delta() -> FolderDelta {
         .build()
 }

+pub fn initial_folder_delta(folder_pad: &FolderPad) -> CollaborateResult<FolderDelta> {
+    let json = folder_pad.to_json()?;
+    let delta = PlainTextDeltaBuilder::new().insert(&json).build();
+    Ok(delta)
+}
+
 impl std::default::Default for FolderPad {
     fn default() -> Self {
         FolderPad {
@@ -46,7 +46,7 @@ impl std::convert::TryFrom<Revision> for BlockInfo {
         }

         let delta = RichTextDelta::from_bytes(&revision.delta_data)?;
-        let doc_json = delta.to_json();
+        let doc_json = delta.to_delta_json();

         Ok(BlockInfo {
             doc_id: revision.object_id,
@@ -95,7 +95,7 @@ impl std::fmt::Debug for Revision {
         let _ = f.write_fmt(format_args!("rev_id {}, ", self.rev_id))?;
         match RichTextDelta::from_bytes(&self.delta_data) {
             Ok(delta) => {
-                let _ = f.write_fmt(format_args!("delta {:?}", delta.to_json()))?;
+                let _ = f.write_fmt(format_args!("delta {:?}", delta.to_delta_json()))?;
             }
             Err(e) => {
                 let _ = f.write_fmt(format_args!("delta {:?}", e))?;
@@ -39,7 +39,7 @@ impl RevisionSyncObject<RichTextAttributes> for ServerDocument {
     }

     fn to_json(&self) -> String {
-        self.delta.to_json()
+        self.delta.to_delta_json()
     }

     fn set_delta(&mut self, new_delta: Delta<RichTextAttributes>) {
@@ -32,7 +32,7 @@ impl RevisionSyncObject<PlainTextAttributes> for ServerFolder {
     }

     fn to_json(&self) -> String {
-        self.delta.to_json()
+        self.delta.to_delta_json()
     }

     fn set_delta(&mut self, new_delta: PlainTextDelta) {
@@ -66,6 +66,7 @@ impl RevIdCounter {
     }
 }

+#[tracing::instrument(level = "trace", skip(revisions), err)]
 pub fn make_delta_from_revisions<T>(revisions: Vec<Revision>) -> CollaborateResult<Delta<T>>
 where
     T: Attributes + DeserializeOwned,
|
||||
folder_delta = folder_delta.compose(&delta)?;
|
||||
}
|
||||
|
||||
let text = folder_delta.to_json();
|
||||
let text = folder_delta.to_delta_json();
|
||||
let mut folder_info = FolderInfoPB::new();
|
||||
folder_info.set_folder_id(folder_id.to_owned());
|
||||
folder_info.set_text(text);
|
||||
@ -236,7 +237,7 @@ pub fn make_document_info_pb_from_revisions_pb(
|
||||
document_delta = document_delta.compose(&delta)?;
|
||||
}
|
||||
|
||||
let text = document_delta.to_json();
|
||||
let text = document_delta.to_delta_json();
|
||||
let mut block_info = BlockInfoPB::new();
|
||||
block_info.set_doc_id(doc_id.to_owned());
|
||||
block_info.set_text(text);
|
||||
|
@@ -151,7 +151,13 @@ where
     pub fn apply(&self, s: &str) -> Result<String, OTError> {
         let s: FlowyStr = s.into();
         if s.utf16_size() != self.utf16_base_len {
-            return Err(ErrorBuilder::new(OTErrorCode::IncompatibleLength).build());
+            return Err(ErrorBuilder::new(OTErrorCode::IncompatibleLength)
+                .msg(format!(
+                    "Expected: {}, received: {}",
+                    self.utf16_base_len,
+                    s.utf16_size()
+                ))
+                .build());
         }
         let mut new_s = String::new();
         let code_point_iter = &mut s.utf16_code_unit_iter();
@@ -515,12 +521,16 @@ impl<T> Delta<T>
 where
     T: Attributes + serde::Serialize,
 {
-    pub fn to_json(&self) -> String {
+    pub fn to_delta_json(&self) -> String {
         serde_json::to_string(self).unwrap_or_else(|_| "".to_owned())
     }

+    pub fn to_str(&self) -> Result<String, OTError> {
+        self.apply("")
+    }
+
     pub fn to_bytes(&self) -> Bytes {
-        let json = self.to_json();
+        let json = self.to_delta_json();
         Bytes::from(json.into_bytes())
     }
 }
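Note (not part of the commit): the hunk above renames Delta::to_json to to_delta_json and adds to_str, which applies the delta to an empty document and returns the resulting text. A minimal usage sketch of that renamed API follows; the import path lib_ot::core::PlainTextDeltaBuilder is an assumption based on the crate names that appear elsewhere in this diff.

// Sketch only: assumes the lib-ot crate's PlainTextDeltaBuilder and the
// Delta methods shown in the hunk above; the module path is an assumption.
use lib_ot::core::PlainTextDeltaBuilder;

fn main() {
    // Build a plain-text delta containing a single insert operation.
    let delta = PlainTextDeltaBuilder::new().insert("hello").build();

    // to_delta_json() (formerly to_json()) serializes the delta's operations,
    // e.g. roughly [{"insert":"hello"}].
    println!("{}", delta.to_delta_json());

    // to_str() applies the delta to an empty document and returns the text it produces.
    println!("{}", delta.to_str().unwrap());

    // to_bytes() is the JSON form as bytes, as used when constructing a Revision.
    let _bytes = delta.to_bytes();
}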