feat: run rustfmt with custom defined fmt configuration (#1848)

* chore: update rustfmt

* chore: apply rustfmt format
Author: Nathan.fooo
Committed: 2023-02-13 09:29:49 +08:00 (committed by GitHub)
Parent: e2496e734c
Commit: 6bb1c4e89c
459 changed files with 50,554 additions and 46,600 deletions
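
The custom rustfmt configuration referred to in the title is not included in this diff. As a rough, hypothetical sketch only, a rustfmt.toml producing the style visible in the reformatted files below (two-space indentation, long signatures and method chains broken across lines) could contain something like the following; the actual option names and values committed to the repository may differ:

# rustfmt.toml -- hypothetical sketch, not the committed configuration
tab_spaces = 2
max_width = 100

With such a file at the workspace root, running cargo fmt --all reformats every crate in the workspace in one pass, which is how a format-only change touching 459 files is typically produced as a single commit.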

View File

@@ -1,10 +1,10 @@
fn main() {
  let crate_name = env!("CARGO_PKG_NAME");
  flowy_codegen::protobuf_file::gen(crate_name);

  #[cfg(feature = "dart")]
  flowy_codegen::dart_event::gen(crate_name);

  #[cfg(feature = "ts")]
  flowy_codegen::ts_event::gen(crate_name);
}

View File

@@ -1,11 +1,11 @@
use crate::{
  entities::parser::{
    app::{AppColorStyle, AppIdentify, AppName},
    workspace::WorkspaceIdentify,
  },
  entities::view::RepeatedViewPB,
  errors::ErrorCode,
  impl_def_and_def_mut,
};
use flowy_derive::ProtoBuf;
use folder_model::AppRevision;
@@ -13,207 +13,210 @@ use std::convert::TryInto;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct AppPB {
  #[pb(index = 1)]
  pub id: String,

  #[pb(index = 2)]
  pub workspace_id: String,

  #[pb(index = 3)]
  pub name: String,

  #[pb(index = 4)]
  pub desc: String,

  #[pb(index = 5)]
  pub belongings: RepeatedViewPB,

  #[pb(index = 6)]
  pub version: i64,

  #[pb(index = 7)]
  pub modified_time: i64,

  #[pb(index = 8)]
  pub create_time: i64,
}

impl std::convert::From<AppRevision> for AppPB {
  fn from(app_serde: AppRevision) -> Self {
    AppPB {
      id: app_serde.id,
      workspace_id: app_serde.workspace_id,
      name: app_serde.name,
      desc: app_serde.desc,
      belongings: app_serde.belongings.into(),
      version: app_serde.version,
      modified_time: app_serde.modified_time,
      create_time: app_serde.create_time,
    }
  }
}

#[derive(Eq, PartialEq, Debug, Default, ProtoBuf, Clone)]
pub struct RepeatedAppPB {
  #[pb(index = 1)]
  pub items: Vec<AppPB>,
}

impl_def_and_def_mut!(RepeatedAppPB, AppPB);

impl std::convert::From<Vec<AppRevision>> for RepeatedAppPB {
  fn from(values: Vec<AppRevision>) -> Self {
    let items = values
      .into_iter()
      .map(|value| value.into())
      .collect::<Vec<AppPB>>();
    RepeatedAppPB { items }
  }
}

#[derive(ProtoBuf, Default)]
pub struct CreateAppPayloadPB {
  #[pb(index = 1)]
  pub workspace_id: String,

  #[pb(index = 2)]
  pub name: String,

  #[pb(index = 3)]
  pub desc: String,

  #[pb(index = 4)]
  pub color_style: ColorStylePB,
}

#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct ColorStylePB {
  #[pb(index = 1)]
  pub theme_color: String,
}

#[derive(Debug)]
pub struct CreateAppParams {
  pub workspace_id: String,
  pub name: String,
  pub desc: String,
  pub color_style: ColorStylePB,
}

impl TryInto<CreateAppParams> for CreateAppPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<CreateAppParams, Self::Error> {
    let name = AppName::parse(self.name)?;
    let id = WorkspaceIdentify::parse(self.workspace_id)?;
    let color_style = AppColorStyle::parse(self.color_style.theme_color.clone())?;

    Ok(CreateAppParams {
      workspace_id: id.0,
      name: name.0,
      desc: self.desc,
      color_style: color_style.into(),
    })
  }
}

impl std::convert::From<AppColorStyle> for ColorStylePB {
  fn from(data: AppColorStyle) -> Self {
    ColorStylePB {
      theme_color: data.theme_color,
    }
  }
}

#[derive(ProtoBuf, Default, Clone, Debug)]
pub struct AppIdPB {
  #[pb(index = 1)]
  pub value: String,
}

impl AppIdPB {
  pub fn new(app_id: &str) -> Self {
    Self {
      value: app_id.to_string(),
    }
  }
}

#[derive(ProtoBuf, Default)]
pub struct UpdateAppPayloadPB {
  #[pb(index = 1)]
  pub app_id: String,

  #[pb(index = 2, one_of)]
  pub name: Option<String>,

  #[pb(index = 3, one_of)]
  pub desc: Option<String>,

  #[pb(index = 4, one_of)]
  pub color_style: Option<ColorStylePB>,

  #[pb(index = 5, one_of)]
  pub is_trash: Option<bool>,
}

#[derive(Debug, Clone)]
pub struct UpdateAppParams {
  pub app_id: String,

  pub name: Option<String>,

  pub desc: Option<String>,

  pub color_style: Option<ColorStylePB>,

  pub is_trash: Option<bool>,
}

impl UpdateAppParams {
  pub fn new(app_id: &str) -> Self {
    Self {
      app_id: app_id.to_string(),
      name: None,
      desc: None,
      color_style: None,
      is_trash: None,
    }
  }

  pub fn name(mut self, name: &str) -> Self {
    self.name = Some(name.to_string());
    self
  }

  pub fn desc(mut self, desc: &str) -> Self {
    self.desc = Some(desc.to_string());
    self
  }

  pub fn trash(mut self) -> Self {
    self.is_trash = Some(true);
    self
  }
}

impl TryInto<UpdateAppParams> for UpdateAppPayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<UpdateAppParams, Self::Error> {
    let app_id = AppIdentify::parse(self.app_id)?.0;

    let name = match self.name {
      None => None,
      Some(name) => Some(AppName::parse(name)?.0),
    };

    let color_style = match self.color_style {
      None => None,
      Some(color_style) => Some(AppColorStyle::parse(color_style.theme_color)?.into()),
    };

    Ok(UpdateAppParams {
      app_id,
      name,
      desc: self.desc,
      color_style,
      is_trash: self.is_trash,
    })
  }
}

View File

@@ -2,12 +2,12 @@ use crate::errors::ErrorCode;
#[derive(Debug)]
pub struct AppColorStyle {
  pub theme_color: String,
}

impl AppColorStyle {
  pub fn parse(theme_color: String) -> Result<AppColorStyle, ErrorCode> {
    // TODO: verify the color style format
    Ok(AppColorStyle { theme_color })
  }
}

View File

@@ -3,18 +3,18 @@ use unicode_segmentation::UnicodeSegmentation;
pub struct AppDesc(pub String);

impl AppDesc {
  #[allow(dead_code)]
  pub fn parse(s: String) -> Result<AppDesc, String> {
    if s.graphemes(true).count() > 1024 {
      return Err("Workspace description too long".to_string());
    }

    Ok(Self(s))
  }
}

impl AsRef<str> for AppDesc {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

View File

@@ -4,17 +4,17 @@ use crate::errors::ErrorCode;
pub struct AppIdentify(pub String);

impl AppIdentify {
  pub fn parse(s: String) -> Result<AppIdentify, ErrorCode> {
    if s.trim().is_empty() {
      return Err(ErrorCode::AppIdInvalid);
    }

    Ok(Self(s))
  }
}

impl AsRef<str> for AppIdentify {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

View File

@@ -4,17 +4,17 @@ use crate::errors::ErrorCode;
pub struct AppName(pub String);

impl AppName {
  pub fn parse(s: String) -> Result<AppName, ErrorCode> {
    if s.trim().is_empty() {
      return Err(ErrorCode::AppNameInvalid);
    }

    Ok(Self(s))
  }
}

impl AsRef<str> for AppName {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

View File

@@ -2,33 +2,33 @@
pub struct TrashIdentify(pub String);

impl TrashIdentify {
  #[allow(dead_code)]
  pub fn parse(s: String) -> Result<TrashIdentify, String> {
    if s.trim().is_empty() {
      return Err("Trash id can not be empty or whitespace".to_string());
    }

    Ok(Self(s))
  }
}

impl AsRef<str> for TrashIdentify {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

#[derive(Debug)]
pub struct TrashIds(pub Vec<String>);

impl TrashIds {
  #[allow(dead_code)]
  pub fn parse(ids: Vec<String>) -> Result<TrashIds, String> {
    let mut trash_ids = vec![];
    for id in ids {
      let id = TrashIdentify::parse(id)?;
      trash_ids.push(id.0);
    }
    Ok(Self(trash_ids))
  }
}

View File

@@ -5,17 +5,17 @@ use unicode_segmentation::UnicodeSegmentation;
pub struct ViewDesc(pub String);

impl ViewDesc {
  pub fn parse(s: String) -> Result<ViewDesc, ErrorCode> {
    if s.graphemes(true).count() > 1000 {
      return Err(ErrorCode::ViewDescTooLong);
    }

    Ok(Self(s))
  }
}

impl AsRef<str> for ViewDesc {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

View File

@@ -4,17 +4,17 @@ use crate::errors::ErrorCode;
pub struct ViewIdentify(pub String);

impl ViewIdentify {
  pub fn parse(s: String) -> Result<ViewIdentify, ErrorCode> {
    if s.trim().is_empty() {
      return Err(ErrorCode::ViewIdInvalid);
    }

    Ok(Self(s))
  }
}

impl AsRef<str> for ViewIdentify {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

View File

@@ -5,21 +5,21 @@ use unicode_segmentation::UnicodeSegmentation;
pub struct ViewName(pub String);

impl ViewName {
  pub fn parse(s: String) -> Result<ViewName, ErrorCode> {
    if s.trim().is_empty() {
      return Err(ErrorCode::ViewNameInvalid);
    }

    if s.graphemes(true).count() > 256 {
      return Err(ErrorCode::ViewNameTooLong);
    }

    Ok(Self(s))
  }
}

impl AsRef<str> for ViewName {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

View File

@@ -4,18 +4,18 @@ use crate::errors::ErrorCode;
pub struct ViewThumbnail(pub String);

impl ViewThumbnail {
  pub fn parse(s: String) -> Result<ViewThumbnail, ErrorCode> {
    // if s.trim().is_empty() {
    //   return Err(format!("View thumbnail can not be empty or whitespace"));
    // }
    // TODO: verify the thumbnail url is valid or not

    Ok(Self(s))
  }
}

impl AsRef<str> for ViewThumbnail {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

View File

@@ -5,17 +5,17 @@ use unicode_segmentation::UnicodeSegmentation;
pub struct WorkspaceDesc(pub String);

impl WorkspaceDesc {
  pub fn parse(s: String) -> Result<WorkspaceDesc, ErrorCode> {
    if s.graphemes(true).count() > 1024 {
      return Err(ErrorCode::WorkspaceNameTooLong);
    }

    Ok(Self(s))
  }
}

impl AsRef<str> for WorkspaceDesc {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

View File

@@ -4,17 +4,17 @@ use crate::errors::ErrorCode;
pub struct WorkspaceIdentify(pub String);

impl WorkspaceIdentify {
  pub fn parse(s: String) -> Result<WorkspaceIdentify, ErrorCode> {
    if s.trim().is_empty() {
      return Err(ErrorCode::WorkspaceIdInvalid);
    }

    Ok(Self(s))
  }
}

impl AsRef<str> for WorkspaceIdentify {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

View File

@@ -5,21 +5,21 @@ use unicode_segmentation::UnicodeSegmentation;
pub struct WorkspaceName(pub String);

impl WorkspaceName {
  pub fn parse(s: String) -> Result<WorkspaceName, ErrorCode> {
    if s.trim().is_empty() {
      return Err(ErrorCode::WorkspaceNameInvalid);
    }

    if s.graphemes(true).count() > 256 {
      return Err(ErrorCode::WorkspaceNameTooLong);
    }

    Ok(Self(s))
  }
}

impl AsRef<str> for WorkspaceName {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

View File

@@ -6,178 +6,185 @@ use std::fmt::Formatter;
#[derive(Eq, PartialEq, ProtoBuf, Default, Debug, Clone)]
pub struct TrashPB {
  #[pb(index = 1)]
  pub id: String,

  #[pb(index = 2)]
  pub name: String,

  #[pb(index = 3)]
  pub modified_time: i64,

  #[pb(index = 4)]
  pub create_time: i64,

  #[pb(index = 5)]
  pub ty: TrashType,
}

impl std::convert::From<TrashRevision> for TrashPB {
  fn from(trash_rev: TrashRevision) -> Self {
    TrashPB {
      id: trash_rev.id,
      name: trash_rev.name,
      modified_time: trash_rev.modified_time,
      create_time: trash_rev.create_time,
      ty: trash_rev.ty.into(),
    }
  }
}

impl std::convert::From<TrashPB> for TrashRevision {
  fn from(trash: TrashPB) -> Self {
    TrashRevision {
      id: trash.id,
      name: trash.name,
      modified_time: trash.modified_time,
      create_time: trash.create_time,
      ty: trash.ty.into(),
    }
  }
}

#[derive(PartialEq, Eq, Debug, Default, ProtoBuf, Clone)]
pub struct RepeatedTrashPB {
  #[pb(index = 1)]
  pub items: Vec<TrashPB>,
}

impl_def_and_def_mut!(RepeatedTrashPB, TrashPB);

impl std::convert::From<Vec<TrashRevision>> for RepeatedTrashPB {
  fn from(trash_revs: Vec<TrashRevision>) -> Self {
    let items: Vec<TrashPB> = trash_revs
      .into_iter()
      .map(|trash_rev| trash_rev.into())
      .collect();
    RepeatedTrashPB { items }
  }
}

#[derive(Eq, PartialEq, Debug, ProtoBuf_Enum, Clone, Serialize, Deserialize)]
pub enum TrashType {
  TrashUnknown = 0,
  TrashView = 1,
  TrashApp = 2,
}

impl std::convert::TryFrom<i32> for TrashType {
  type Error = String;

  fn try_from(value: i32) -> Result<Self, Self::Error> {
    match value {
      0 => Ok(TrashType::TrashUnknown),
      1 => Ok(TrashType::TrashView),
      2 => Ok(TrashType::TrashApp),
      _ => Err(format!("Invalid trash type: {}", value)),
    }
  }
}

impl std::convert::From<TrashTypeRevision> for TrashType {
  fn from(rev: TrashTypeRevision) -> Self {
    match rev {
      TrashTypeRevision::Unknown => TrashType::TrashUnknown,
      TrashTypeRevision::TrashView => TrashType::TrashView,
      TrashTypeRevision::TrashApp => TrashType::TrashApp,
    }
  }
}

impl std::convert::From<TrashType> for TrashTypeRevision {
  fn from(rev: TrashType) -> Self {
    match rev {
      TrashType::TrashUnknown => TrashTypeRevision::Unknown,
      TrashType::TrashView => TrashTypeRevision::TrashView,
      TrashType::TrashApp => TrashTypeRevision::TrashApp,
    }
  }
}

impl std::default::Default for TrashType {
  fn default() -> Self {
    TrashType::TrashUnknown
  }
}

#[derive(PartialEq, Eq, ProtoBuf, Default, Debug, Clone)]
pub struct RepeatedTrashIdPB {
  #[pb(index = 1)]
  pub items: Vec<TrashIdPB>,

  #[pb(index = 2)]
  pub delete_all: bool,
}

impl std::fmt::Display for RepeatedTrashIdPB {
  fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
    f.write_str(&format!(
      "{:?}",
      &self
        .items
        .iter()
        .map(|item| format!("{}", item))
        .collect::<Vec<_>>()
    ))
  }
}

impl RepeatedTrashIdPB {
  pub fn all() -> RepeatedTrashIdPB {
    RepeatedTrashIdPB {
      items: vec![],
      delete_all: true,
    }
  }
}

impl std::convert::From<Vec<TrashIdPB>> for RepeatedTrashIdPB {
  fn from(items: Vec<TrashIdPB>) -> Self {
    RepeatedTrashIdPB {
      items,
      delete_all: false,
    }
  }
}

impl std::convert::From<Vec<TrashRevision>> for RepeatedTrashIdPB {
  fn from(trash: Vec<TrashRevision>) -> Self {
    let items = trash
      .into_iter()
      .map(|t| TrashIdPB {
        id: t.id,
        ty: t.ty.into(),
      })
      .collect::<Vec<_>>();

    RepeatedTrashIdPB {
      items,
      delete_all: false,
    }
  }
}

#[derive(PartialEq, Eq, ProtoBuf, Default, Debug, Clone)]
pub struct TrashIdPB {
  #[pb(index = 1)]
  pub id: String,

  #[pb(index = 2)]
  pub ty: TrashType,
}

impl std::fmt::Display for TrashIdPB {
  fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
    f.write_str(&format!("{:?}:{}", self.ty, self.id))
  }
}

impl std::convert::From<&TrashRevision> for TrashIdPB {
  fn from(trash: &TrashRevision) -> Self {
    TrashIdPB {
      id: trash.id.clone(),
      ty: trash.ty.clone().into(),
    }
  }
}

View File

@@ -1,10 +1,10 @@
use crate::{
entities::parser::{
app::AppIdentify,
view::{ViewDesc, ViewIdentify, ViewName, ViewThumbnail},
},
errors::ErrorCode,
impl_def_and_def_mut,
entities::parser::{
app::AppIdentify,
view::{ViewDesc, ViewIdentify, ViewName, ViewThumbnail},
},
errors::ErrorCode,
impl_def_and_def_mut,
};
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use folder_model::{gen_view_id, ViewDataFormatRevision, ViewLayoutTypeRevision, ViewRevision};
@@ -12,326 +12,329 @@ use std::convert::TryInto;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct ViewPB {
#[pb(index = 1)]
pub id: String,
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub app_id: String,
#[pb(index = 2)]
pub app_id: String,
#[pb(index = 3)]
pub name: String,
#[pb(index = 3)]
pub name: String,
#[pb(index = 4)]
pub data_format: ViewDataFormatPB,
#[pb(index = 4)]
pub data_format: ViewDataFormatPB,
#[pb(index = 5)]
pub modified_time: i64,
#[pb(index = 5)]
pub modified_time: i64,
#[pb(index = 6)]
pub create_time: i64,
#[pb(index = 6)]
pub create_time: i64,
#[pb(index = 7)]
pub layout: ViewLayoutTypePB,
#[pb(index = 7)]
pub layout: ViewLayoutTypePB,
}
impl std::convert::From<ViewRevision> for ViewPB {
fn from(rev: ViewRevision) -> Self {
ViewPB {
id: rev.id,
app_id: rev.app_id,
name: rev.name,
data_format: rev.data_format.into(),
modified_time: rev.modified_time,
create_time: rev.create_time,
layout: rev.layout.into(),
}
fn from(rev: ViewRevision) -> Self {
ViewPB {
id: rev.id,
app_id: rev.app_id,
name: rev.name,
data_format: rev.data_format.into(),
modified_time: rev.modified_time,
create_time: rev.create_time,
layout: rev.layout.into(),
}
}
}
#[derive(Eq, PartialEq, Hash, Debug, ProtoBuf_Enum, Clone)]
pub enum ViewDataFormatPB {
/// Indicate this view is using `Delta` for the persistence data format, it's deprecated.
DeltaFormat = 0,
/// Indicate this view is using `Database` for the persistence data format. It is used in AppFlowy database
/// views including Grid,Board, and Calendar.
DatabaseFormat = 1,
/// Indicate this view is using `Node` for the persistence data format. It is used in AppFlowy document
NodeFormat = 2,
/// Indicate this view is using `Delta` for the persistence data format, it's deprecated.
DeltaFormat = 0,
/// Indicate this view is using `Database` for the persistence data format. It is used in AppFlowy database
/// views including Grid,Board, and Calendar.
DatabaseFormat = 1,
/// Indicate this view is using `Node` for the persistence data format. It is used in AppFlowy document
NodeFormat = 2,
}
impl std::default::Default for ViewDataFormatPB {
fn default() -> Self {
ViewDataFormatRevision::default().into()
}
fn default() -> Self {
ViewDataFormatRevision::default().into()
}
}
impl std::convert::From<ViewDataFormatRevision> for ViewDataFormatPB {
fn from(rev: ViewDataFormatRevision) -> Self {
match rev {
ViewDataFormatRevision::DeltaFormat => ViewDataFormatPB::DeltaFormat,
ViewDataFormatRevision::DatabaseFormat => ViewDataFormatPB::DatabaseFormat,
ViewDataFormatRevision::NodeFormat => ViewDataFormatPB::NodeFormat,
}
fn from(rev: ViewDataFormatRevision) -> Self {
match rev {
ViewDataFormatRevision::DeltaFormat => ViewDataFormatPB::DeltaFormat,
ViewDataFormatRevision::DatabaseFormat => ViewDataFormatPB::DatabaseFormat,
ViewDataFormatRevision::NodeFormat => ViewDataFormatPB::NodeFormat,
}
}
}
impl std::convert::From<ViewDataFormatPB> for ViewDataFormatRevision {
fn from(ty: ViewDataFormatPB) -> Self {
match ty {
ViewDataFormatPB::DeltaFormat => ViewDataFormatRevision::DeltaFormat,
ViewDataFormatPB::DatabaseFormat => ViewDataFormatRevision::DatabaseFormat,
ViewDataFormatPB::NodeFormat => ViewDataFormatRevision::NodeFormat,
}
fn from(ty: ViewDataFormatPB) -> Self {
match ty {
ViewDataFormatPB::DeltaFormat => ViewDataFormatRevision::DeltaFormat,
ViewDataFormatPB::DatabaseFormat => ViewDataFormatRevision::DatabaseFormat,
ViewDataFormatPB::NodeFormat => ViewDataFormatRevision::NodeFormat,
}
}
}
#[derive(Eq, PartialEq, Hash, Debug, ProtoBuf_Enum, Clone)]
pub enum ViewLayoutTypePB {
Document = 0,
Grid = 3,
Board = 4,
Calendar = 5,
Document = 0,
Grid = 3,
Board = 4,
Calendar = 5,
}
impl std::default::Default for ViewLayoutTypePB {
fn default() -> Self {
ViewLayoutTypePB::Grid
}
fn default() -> Self {
ViewLayoutTypePB::Grid
}
}
impl std::convert::From<ViewLayoutTypeRevision> for ViewLayoutTypePB {
fn from(rev: ViewLayoutTypeRevision) -> Self {
match rev {
ViewLayoutTypeRevision::Grid => ViewLayoutTypePB::Grid,
ViewLayoutTypeRevision::Board => ViewLayoutTypePB::Board,
ViewLayoutTypeRevision::Document => ViewLayoutTypePB::Document,
ViewLayoutTypeRevision::Calendar => ViewLayoutTypePB::Calendar,
}
fn from(rev: ViewLayoutTypeRevision) -> Self {
match rev {
ViewLayoutTypeRevision::Grid => ViewLayoutTypePB::Grid,
ViewLayoutTypeRevision::Board => ViewLayoutTypePB::Board,
ViewLayoutTypeRevision::Document => ViewLayoutTypePB::Document,
ViewLayoutTypeRevision::Calendar => ViewLayoutTypePB::Calendar,
}
}
}
impl std::convert::From<ViewLayoutTypePB> for ViewLayoutTypeRevision {
fn from(rev: ViewLayoutTypePB) -> Self {
match rev {
ViewLayoutTypePB::Grid => ViewLayoutTypeRevision::Grid,
ViewLayoutTypePB::Board => ViewLayoutTypeRevision::Board,
ViewLayoutTypePB::Document => ViewLayoutTypeRevision::Document,
ViewLayoutTypePB::Calendar => ViewLayoutTypeRevision::Calendar,
}
fn from(rev: ViewLayoutTypePB) -> Self {
match rev {
ViewLayoutTypePB::Grid => ViewLayoutTypeRevision::Grid,
ViewLayoutTypePB::Board => ViewLayoutTypeRevision::Board,
ViewLayoutTypePB::Document => ViewLayoutTypeRevision::Document,
ViewLayoutTypePB::Calendar => ViewLayoutTypeRevision::Calendar,
}
}
}
#[derive(Eq, PartialEq, Debug, Default, ProtoBuf, Clone)]
pub struct RepeatedViewPB {
#[pb(index = 1)]
pub items: Vec<ViewPB>,
#[pb(index = 1)]
pub items: Vec<ViewPB>,
}
impl_def_and_def_mut!(RepeatedViewPB, ViewPB);
impl std::convert::From<Vec<ViewRevision>> for RepeatedViewPB {
fn from(values: Vec<ViewRevision>) -> Self {
let items = values.into_iter().map(|value| value.into()).collect::<Vec<ViewPB>>();
RepeatedViewPB { items }
}
fn from(values: Vec<ViewRevision>) -> Self {
let items = values
.into_iter()
.map(|value| value.into())
.collect::<Vec<ViewPB>>();
RepeatedViewPB { items }
}
}
#[derive(Default, ProtoBuf)]
pub struct RepeatedViewIdPB {
#[pb(index = 1)]
pub items: Vec<String>,
#[pb(index = 1)]
pub items: Vec<String>,
}
#[derive(Default, ProtoBuf)]
pub struct CreateViewPayloadPB {
#[pb(index = 1)]
pub belong_to_id: String,
#[pb(index = 1)]
pub belong_to_id: String,
#[pb(index = 2)]
pub name: String,
#[pb(index = 2)]
pub name: String,
#[pb(index = 3)]
pub desc: String,
#[pb(index = 3)]
pub desc: String,
#[pb(index = 4, one_of)]
pub thumbnail: Option<String>,
#[pb(index = 4, one_of)]
pub thumbnail: Option<String>,
#[pb(index = 5)]
pub data_format: ViewDataFormatPB,
#[pb(index = 5)]
pub data_format: ViewDataFormatPB,
#[pb(index = 6)]
pub layout: ViewLayoutTypePB,
#[pb(index = 6)]
pub layout: ViewLayoutTypePB,
#[pb(index = 7)]
pub initial_data: Vec<u8>,
#[pb(index = 7)]
pub initial_data: Vec<u8>,
}
#[derive(Debug, Clone)]
pub struct CreateViewParams {
pub belong_to_id: String,
pub name: String,
pub desc: String,
pub thumbnail: String,
pub data_format: ViewDataFormatPB,
pub layout: ViewLayoutTypePB,
pub view_id: String,
pub initial_data: Vec<u8>,
pub belong_to_id: String,
pub name: String,
pub desc: String,
pub thumbnail: String,
pub data_format: ViewDataFormatPB,
pub layout: ViewLayoutTypePB,
pub view_id: String,
pub initial_data: Vec<u8>,
}
impl TryInto<CreateViewParams> for CreateViewPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<CreateViewParams, Self::Error> {
let name = ViewName::parse(self.name)?.0;
let belong_to_id = AppIdentify::parse(self.belong_to_id)?.0;
let view_id = gen_view_id();
let thumbnail = match self.thumbnail {
None => "".to_string(),
Some(thumbnail) => ViewThumbnail::parse(thumbnail)?.0,
};
fn try_into(self) -> Result<CreateViewParams, Self::Error> {
let name = ViewName::parse(self.name)?.0;
let belong_to_id = AppIdentify::parse(self.belong_to_id)?.0;
let view_id = gen_view_id();
let thumbnail = match self.thumbnail {
None => "".to_string(),
Some(thumbnail) => ViewThumbnail::parse(thumbnail)?.0,
};
Ok(CreateViewParams {
belong_to_id,
name,
desc: self.desc,
data_format: self.data_format,
layout: self.layout,
thumbnail,
view_id,
initial_data: self.initial_data,
})
}
Ok(CreateViewParams {
belong_to_id,
name,
desc: self.desc,
data_format: self.data_format,
layout: self.layout,
thumbnail,
view_id,
initial_data: self.initial_data,
})
}
}
#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct ViewIdPB {
#[pb(index = 1)]
pub value: String,
#[pb(index = 1)]
pub value: String,
}
impl std::convert::From<&str> for ViewIdPB {
fn from(value: &str) -> Self {
ViewIdPB {
value: value.to_string(),
}
fn from(value: &str) -> Self {
ViewIdPB {
value: value.to_string(),
}
}
}
#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct DeletedViewPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2, one_of)]
pub index: Option<i32>,
#[pb(index = 2, one_of)]
pub index: Option<i32>,
}
impl std::ops::Deref for ViewIdPB {
type Target = str;
type Target = str;
fn deref(&self) -> &Self::Target {
&self.value
}
fn deref(&self) -> &Self::Target {
&self.value
}
}
#[derive(Default, ProtoBuf)]
pub struct UpdateViewPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2, one_of)]
pub name: Option<String>,
#[pb(index = 2, one_of)]
pub name: Option<String>,
#[pb(index = 3, one_of)]
pub desc: Option<String>,
#[pb(index = 3, one_of)]
pub desc: Option<String>,
#[pb(index = 4, one_of)]
pub thumbnail: Option<String>,
#[pb(index = 4, one_of)]
pub thumbnail: Option<String>,
}
#[derive(Clone, Debug)]
pub struct UpdateViewParams {
pub view_id: String,
pub name: Option<String>,
pub desc: Option<String>,
pub thumbnail: Option<String>,
pub view_id: String,
pub name: Option<String>,
pub desc: Option<String>,
pub thumbnail: Option<String>,
}
impl TryInto<UpdateViewParams> for UpdateViewPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<UpdateViewParams, Self::Error> {
let view_id = ViewIdentify::parse(self.view_id)?.0;
fn try_into(self) -> Result<UpdateViewParams, Self::Error> {
let view_id = ViewIdentify::parse(self.view_id)?.0;
let name = match self.name {
None => None,
Some(name) => Some(ViewName::parse(name)?.0),
};
let name = match self.name {
None => None,
Some(name) => Some(ViewName::parse(name)?.0),
};
let desc = match self.desc {
None => None,
Some(desc) => Some(ViewDesc::parse(desc)?.0),
};
let desc = match self.desc {
None => None,
Some(desc) => Some(ViewDesc::parse(desc)?.0),
};
let thumbnail = match self.thumbnail {
None => None,
Some(thumbnail) => Some(ViewThumbnail::parse(thumbnail)?.0),
};
let thumbnail = match self.thumbnail {
None => None,
Some(thumbnail) => Some(ViewThumbnail::parse(thumbnail)?.0),
};
Ok(UpdateViewParams {
view_id,
name,
desc,
thumbnail,
})
}
Ok(UpdateViewParams {
view_id,
name,
desc,
thumbnail,
})
}
}
#[derive(ProtoBuf_Enum)]
pub enum MoveFolderItemType {
MoveApp = 0,
MoveView = 1,
MoveApp = 0,
MoveView = 1,
}
impl std::default::Default for MoveFolderItemType {
fn default() -> Self {
MoveFolderItemType::MoveApp
}
fn default() -> Self {
MoveFolderItemType::MoveApp
}
}
#[derive(Default, ProtoBuf)]
pub struct MoveFolderItemPayloadPB {
#[pb(index = 1)]
pub item_id: String,
#[pb(index = 1)]
pub item_id: String,
#[pb(index = 2)]
pub from: i32,
#[pb(index = 2)]
pub from: i32,
#[pb(index = 3)]
pub to: i32,
#[pb(index = 3)]
pub to: i32,
#[pb(index = 4)]
pub ty: MoveFolderItemType,
#[pb(index = 4)]
pub ty: MoveFolderItemType,
}
pub struct MoveFolderItemParams {
pub item_id: String,
pub from: usize,
pub to: usize,
pub ty: MoveFolderItemType,
pub item_id: String,
pub from: usize,
pub to: usize,
pub ty: MoveFolderItemType,
}
impl TryInto<MoveFolderItemParams> for MoveFolderItemPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<MoveFolderItemParams, Self::Error> {
let view_id = ViewIdentify::parse(self.item_id)?.0;
Ok(MoveFolderItemParams {
item_id: view_id,
from: self.from as usize,
to: self.to as usize,
ty: self.ty,
})
}
fn try_into(self) -> Result<MoveFolderItemParams, Self::Error> {
let view_id = ViewIdentify::parse(self.item_id)?.0;
Ok(MoveFolderItemParams {
item_id: view_id,
from: self.from as usize,
to: self.to as usize,
ty: self.ty,
})
}
}
// impl<'de> Deserialize<'de> for ViewDataType {

View File

@@ -1,8 +1,8 @@
use crate::{
  entities::parser::workspace::{WorkspaceDesc, WorkspaceIdentify, WorkspaceName},
  entities::{app::RepeatedAppPB, view::ViewPB},
  errors::*,
  impl_def_and_def_mut,
};
use flowy_derive::ProtoBuf;
use folder_model::WorkspaceRevision;
@@ -10,129 +10,131 @@ use std::convert::TryInto;
#[derive(Eq, PartialEq, ProtoBuf, Default, Debug, Clone)]
pub struct WorkspacePB {
  #[pb(index = 1)]
  pub id: String,

  #[pb(index = 2)]
  pub name: String,

  #[pb(index = 3)]
  pub desc: String,

  #[pb(index = 4)]
  pub apps: RepeatedAppPB,

  #[pb(index = 5)]
  pub modified_time: i64,

  #[pb(index = 6)]
  pub create_time: i64,
}

impl std::convert::From<WorkspaceRevision> for WorkspacePB {
  fn from(workspace_serde: WorkspaceRevision) -> Self {
    WorkspacePB {
      id: workspace_serde.id,
      name: workspace_serde.name,
      desc: workspace_serde.desc,
      apps: workspace_serde.apps.into(),
      modified_time: workspace_serde.modified_time,
      create_time: workspace_serde.create_time,
    }
  }
}

#[derive(PartialEq, Eq, Debug, Default, ProtoBuf)]
pub struct RepeatedWorkspacePB {
  #[pb(index = 1)]
  pub items: Vec<WorkspacePB>,
}

impl_def_and_def_mut!(RepeatedWorkspacePB, WorkspacePB);

#[derive(ProtoBuf, Default)]
pub struct CreateWorkspacePayloadPB {
  #[pb(index = 1)]
  pub name: String,

  #[pb(index = 2)]
  pub desc: String,
}

#[derive(Clone, Debug)]
pub struct CreateWorkspaceParams {
  pub name: String,
  pub desc: String,
}

impl TryInto<CreateWorkspaceParams> for CreateWorkspacePayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<CreateWorkspaceParams, Self::Error> {
    let name = WorkspaceName::parse(self.name)?;
    let desc = WorkspaceDesc::parse(self.desc)?;

    Ok(CreateWorkspaceParams {
      name: name.0,
      desc: desc.0,
    })
  }
}

// Read all workspaces if the workspace_id is None
#[derive(Clone, ProtoBuf, Default, Debug)]
pub struct WorkspaceIdPB {
  #[pb(index = 1, one_of)]
  pub value: Option<String>,
}

impl WorkspaceIdPB {
  pub fn new(workspace_id: Option<String>) -> Self {
    Self {
      value: workspace_id,
    }
  }
}

#[derive(Default, ProtoBuf, Clone)]
pub struct WorkspaceSettingPB {
  #[pb(index = 1)]
  pub workspace: WorkspacePB,

  #[pb(index = 2, one_of)]
  pub latest_view: Option<ViewPB>,
}

#[derive(ProtoBuf, Default)]
pub struct UpdateWorkspacePayloadPB {
  #[pb(index = 1)]
  pub id: String,

  #[pb(index = 2, one_of)]
  pub name: Option<String>,

  #[pb(index = 3, one_of)]
  pub desc: Option<String>,
}

#[derive(Clone, Debug)]
pub struct UpdateWorkspaceParams {
  pub id: String,
  pub name: Option<String>,
  pub desc: Option<String>,
}

impl TryInto<UpdateWorkspaceParams> for UpdateWorkspacePayloadPB {
  type Error = ErrorCode;

  fn try_into(self) -> Result<UpdateWorkspaceParams, Self::Error> {
    let name = match self.name {
      None => None,
      Some(name) => Some(WorkspaceName::parse(name)?.0),
    };
    let id = WorkspaceIdentify::parse(self.id)?;

    Ok(UpdateWorkspaceParams {
      id: id.0,
      name,
      desc: self.desc,
    })
  }
}

View File

@@ -1,13 +1,16 @@
use crate::{
entities::{
app::{AppIdPB, CreateAppParams, UpdateAppParams},
trash::RepeatedTrashIdPB,
view::{CreateViewParams, RepeatedViewIdPB, UpdateViewParams, ViewIdPB},
workspace::{CreateWorkspaceParams, UpdateWorkspaceParams, WorkspaceIdPB},
},
errors::FlowyError,
manager::FolderManager,
services::{app::event_handler::*, trash::event_handler::*, view::event_handler::*, workspace::event_handler::*},
entities::{
app::{AppIdPB, CreateAppParams, UpdateAppParams},
trash::RepeatedTrashIdPB,
view::{CreateViewParams, RepeatedViewIdPB, UpdateViewParams, ViewIdPB},
workspace::{CreateWorkspaceParams, UpdateWorkspaceParams, WorkspaceIdPB},
},
errors::FlowyError,
manager::FolderManager,
services::{
app::event_handler::*, trash::event_handler::*, view::event_handler::*,
workspace::event_handler::*,
},
};
use flowy_derive::{Flowy_Event, ProtoBuf_Enum};
use flowy_sqlite::{ConnectionPool, DBConnection};
@@ -20,204 +20,228 @@ use strum_macros::Display;
pub trait WorkspaceDeps: WorkspaceUser + WorkspaceDatabase {}
pub trait WorkspaceUser: Send + Sync {
fn user_id(&self) -> Result<String, FlowyError>;
fn token(&self) -> Result<String, FlowyError>;
fn user_id(&self) -> Result<String, FlowyError>;
fn token(&self) -> Result<String, FlowyError>;
}
pub trait WorkspaceDatabase: Send + Sync {
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError>;
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError>;
fn db_connection(&self) -> Result<DBConnection, FlowyError> {
let pool = self.db_pool()?;
let conn = pool.get().map_err(|e| FlowyError::internal().context(e))?;
Ok(conn)
}
fn db_connection(&self) -> Result<DBConnection, FlowyError> {
let pool = self.db_pool()?;
let conn = pool.get().map_err(|e| FlowyError::internal().context(e))?;
Ok(conn)
}
}
pub fn init(folder: Arc<FolderManager>) -> AFPlugin {
let mut plugin = AFPlugin::new()
.name("Flowy-Workspace")
.state(folder.workspace_controller.clone())
.state(folder.app_controller.clone())
.state(folder.view_controller.clone())
.state(folder.trash_controller.clone())
.state(folder.clone());
let mut plugin = AFPlugin::new()
.name("Flowy-Workspace")
.state(folder.workspace_controller.clone())
.state(folder.app_controller.clone())
.state(folder.view_controller.clone())
.state(folder.trash_controller.clone())
.state(folder.clone());
// Workspace
plugin = plugin
.event(FolderEvent::CreateWorkspace, create_workspace_handler)
.event(FolderEvent::ReadCurrentWorkspace, read_cur_workspace_handler)
.event(FolderEvent::ReadWorkspaces, read_workspaces_handler)
.event(FolderEvent::OpenWorkspace, open_workspace_handler)
.event(FolderEvent::ReadWorkspaceApps, read_workspace_apps_handler);
// Workspace
plugin = plugin
.event(FolderEvent::CreateWorkspace, create_workspace_handler)
.event(
FolderEvent::ReadCurrentWorkspace,
read_cur_workspace_handler,
)
.event(FolderEvent::ReadWorkspaces, read_workspaces_handler)
.event(FolderEvent::OpenWorkspace, open_workspace_handler)
.event(FolderEvent::ReadWorkspaceApps, read_workspace_apps_handler);
// App
plugin = plugin
.event(FolderEvent::CreateApp, create_app_handler)
.event(FolderEvent::ReadApp, read_app_handler)
.event(FolderEvent::UpdateApp, update_app_handler)
.event(FolderEvent::DeleteApp, delete_app_handler);
// App
plugin = plugin
.event(FolderEvent::CreateApp, create_app_handler)
.event(FolderEvent::ReadApp, read_app_handler)
.event(FolderEvent::UpdateApp, update_app_handler)
.event(FolderEvent::DeleteApp, delete_app_handler);
// View
plugin = plugin
.event(FolderEvent::CreateView, create_view_handler)
.event(FolderEvent::ReadView, read_view_handler)
.event(FolderEvent::UpdateView, update_view_handler)
.event(FolderEvent::DeleteView, delete_view_handler)
.event(FolderEvent::DuplicateView, duplicate_view_handler)
.event(FolderEvent::SetLatestView, set_latest_view_handler)
.event(FolderEvent::CloseView, close_view_handler)
.event(FolderEvent::MoveItem, move_item_handler);
// View
plugin = plugin
.event(FolderEvent::CreateView, create_view_handler)
.event(FolderEvent::ReadView, read_view_handler)
.event(FolderEvent::UpdateView, update_view_handler)
.event(FolderEvent::DeleteView, delete_view_handler)
.event(FolderEvent::DuplicateView, duplicate_view_handler)
.event(FolderEvent::SetLatestView, set_latest_view_handler)
.event(FolderEvent::CloseView, close_view_handler)
.event(FolderEvent::MoveItem, move_item_handler);
// Trash
plugin = plugin
.event(FolderEvent::ReadTrash, read_trash_handler)
.event(FolderEvent::PutbackTrash, putback_trash_handler)
.event(FolderEvent::DeleteTrash, delete_trash_handler)
.event(FolderEvent::RestoreAllTrash, restore_all_trash_handler)
.event(FolderEvent::DeleteAllTrash, delete_all_trash_handler);
// Trash
plugin = plugin
.event(FolderEvent::ReadTrash, read_trash_handler)
.event(FolderEvent::PutbackTrash, putback_trash_handler)
.event(FolderEvent::DeleteTrash, delete_trash_handler)
.event(FolderEvent::RestoreAllTrash, restore_all_trash_handler)
.event(FolderEvent::DeleteAllTrash, delete_all_trash_handler);
plugin
plugin
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
#[event_err = "FlowyError"]
pub enum FolderEvent {
/// Create a new workspace
#[event(input = "CreateWorkspacePayloadPB", output = "WorkspacePB")]
CreateWorkspace = 0,
/// Create a new workspace
#[event(input = "CreateWorkspacePayloadPB", output = "WorkspacePB")]
CreateWorkspace = 0,
/// Read the current opening workspace
#[event(output = "WorkspaceSettingPB")]
ReadCurrentWorkspace = 1,
/// Read the current opening workspace
#[event(output = "WorkspaceSettingPB")]
ReadCurrentWorkspace = 1,
/// Open the workspace and mark it as the current workspace
#[event(input = "WorkspaceIdPB", output = "RepeatedWorkspacePB")]
ReadWorkspaces = 2,
/// Open the workspace and mark it as the current workspace
#[event(input = "WorkspaceIdPB", output = "RepeatedWorkspacePB")]
ReadWorkspaces = 2,
/// Delete the workspace
#[event(input = "WorkspaceIdPB")]
DeleteWorkspace = 3,
/// Delete the workspace
#[event(input = "WorkspaceIdPB")]
DeleteWorkspace = 3,
/// Open the workspace and mark it as the current workspace
#[event(input = "WorkspaceIdPB", output = "WorkspacePB")]
OpenWorkspace = 4,
/// Open the workspace and mark it as the current workspace
#[event(input = "WorkspaceIdPB", output = "WorkspacePB")]
OpenWorkspace = 4,
/// Return a list of apps that belong to this workspace
#[event(input = "WorkspaceIdPB", output = "RepeatedAppPB")]
ReadWorkspaceApps = 5,
/// Return a list of apps that belong to this workspace
#[event(input = "WorkspaceIdPB", output = "RepeatedAppPB")]
ReadWorkspaceApps = 5,
/// Create a new app
#[event(input = "CreateAppPayloadPB", output = "AppPB")]
CreateApp = 101,
/// Create a new app
#[event(input = "CreateAppPayloadPB", output = "AppPB")]
CreateApp = 101,
/// Delete the app
#[event(input = "AppIdPB")]
DeleteApp = 102,
/// Delete the app
#[event(input = "AppIdPB")]
DeleteApp = 102,
/// Read the app
#[event(input = "AppIdPB", output = "AppPB")]
ReadApp = 103,
/// Read the app
#[event(input = "AppIdPB", output = "AppPB")]
ReadApp = 103,
/// Update the app's properties including the name,description, etc.
#[event(input = "UpdateAppPayloadPB")]
UpdateApp = 104,
/// Update the app's properties including the name,description, etc.
#[event(input = "UpdateAppPayloadPB")]
UpdateApp = 104,
/// Create a new view in the corresponding app
#[event(input = "CreateViewPayloadPB", output = "ViewPB")]
CreateView = 201,
/// Create a new view in the corresponding app
#[event(input = "CreateViewPayloadPB", output = "ViewPB")]
CreateView = 201,
/// Return the view info
#[event(input = "ViewIdPB", output = "ViewPB")]
ReadView = 202,
/// Return the view info
#[event(input = "ViewIdPB", output = "ViewPB")]
ReadView = 202,
/// Update the view's properties including the name,description, etc.
#[event(input = "UpdateViewPayloadPB", output = "ViewPB")]
UpdateView = 203,
/// Update the view's properties including the name,description, etc.
#[event(input = "UpdateViewPayloadPB", output = "ViewPB")]
UpdateView = 203,
/// Move the view to the trash folder
#[event(input = "RepeatedViewIdPB")]
DeleteView = 204,
/// Move the view to the trash folder
#[event(input = "RepeatedViewIdPB")]
DeleteView = 204,
/// Duplicate the view
#[event(input = "ViewPB")]
DuplicateView = 205,
/// Duplicate the view
#[event(input = "ViewPB")]
DuplicateView = 205,
/// Close and release the resources that are used by this view.
/// It should get called when the 'View' page get destroy
#[event(input = "ViewIdPB")]
CloseView = 206,
/// Close and release the resources that are used by this view.
/// It should get called when the 'View' page get destroy
#[event(input = "ViewIdPB")]
CloseView = 206,
#[event()]
CopyLink = 220,
#[event()]
CopyLink = 220,
/// Set the current visiting view
#[event(input = "ViewIdPB")]
SetLatestView = 221,
/// Set the current visiting view
#[event(input = "ViewIdPB")]
SetLatestView = 221,
/// Move the view or app to another place
#[event(input = "MoveFolderItemPayloadPB")]
MoveItem = 230,
/// Move the view or app to another place
#[event(input = "MoveFolderItemPayloadPB")]
MoveItem = 230,
/// Read the trash that was deleted by the user
#[event(output = "RepeatedTrashPB")]
ReadTrash = 300,
/// Read the trash that was deleted by the user
#[event(output = "RepeatedTrashPB")]
ReadTrash = 300,
/// Put back the trash to the origin folder
#[event(input = "TrashIdPB")]
PutbackTrash = 301,
/// Put back the trash to the origin folder
#[event(input = "TrashIdPB")]
PutbackTrash = 301,
/// Delete the trash from the disk
#[event(input = "RepeatedTrashIdPB")]
DeleteTrash = 302,
/// Delete the trash from the disk
#[event(input = "RepeatedTrashIdPB")]
DeleteTrash = 302,
/// Put back all the trash to its original folder
#[event()]
RestoreAllTrash = 303,
/// Put back all the trash to its original folder
#[event()]
RestoreAllTrash = 303,
/// Delete all the trash from the disk
#[event()]
DeleteAllTrash = 304,
/// Delete all the trash from the disk
#[event()]
DeleteAllTrash = 304,
}
pub trait FolderCouldServiceV1: Send + Sync {
fn init(&self);
fn init(&self);
// Workspace
fn create_workspace(
&self,
token: &str,
params: CreateWorkspaceParams,
) -> FutureResult<WorkspaceRevision, FlowyError>;
// Workspace
fn create_workspace(
&self,
token: &str,
params: CreateWorkspaceParams,
) -> FutureResult<WorkspaceRevision, FlowyError>;
fn read_workspace(&self, token: &str, params: WorkspaceIdPB) -> FutureResult<Vec<WorkspaceRevision>, FlowyError>;
fn read_workspace(
&self,
token: &str,
params: WorkspaceIdPB,
) -> FutureResult<Vec<WorkspaceRevision>, FlowyError>;
fn update_workspace(&self, token: &str, params: UpdateWorkspaceParams) -> FutureResult<(), FlowyError>;
fn update_workspace(
&self,
token: &str,
params: UpdateWorkspaceParams,
) -> FutureResult<(), FlowyError>;
fn delete_workspace(&self, token: &str, params: WorkspaceIdPB) -> FutureResult<(), FlowyError>;
fn delete_workspace(&self, token: &str, params: WorkspaceIdPB) -> FutureResult<(), FlowyError>;
// View
fn create_view(&self, token: &str, params: CreateViewParams) -> FutureResult<ViewRevision, FlowyError>;
// View
fn create_view(
&self,
token: &str,
params: CreateViewParams,
) -> FutureResult<ViewRevision, FlowyError>;
fn read_view(&self, token: &str, params: ViewIdPB) -> FutureResult<Option<ViewRevision>, FlowyError>;
fn read_view(
&self,
token: &str,
params: ViewIdPB,
) -> FutureResult<Option<ViewRevision>, FlowyError>;
fn delete_view(&self, token: &str, params: RepeatedViewIdPB) -> FutureResult<(), FlowyError>;
fn delete_view(&self, token: &str, params: RepeatedViewIdPB) -> FutureResult<(), FlowyError>;
fn update_view(&self, token: &str, params: UpdateViewParams) -> FutureResult<(), FlowyError>;
fn update_view(&self, token: &str, params: UpdateViewParams) -> FutureResult<(), FlowyError>;
// App
fn create_app(&self, token: &str, params: CreateAppParams) -> FutureResult<AppRevision, FlowyError>;
// App
fn create_app(
&self,
token: &str,
params: CreateAppParams,
) -> FutureResult<AppRevision, FlowyError>;
fn read_app(&self, token: &str, params: AppIdPB) -> FutureResult<Option<AppRevision>, FlowyError>;
fn read_app(&self, token: &str, params: AppIdPB)
-> FutureResult<Option<AppRevision>, FlowyError>;
fn update_app(&self, token: &str, params: UpdateAppParams) -> FutureResult<(), FlowyError>;
fn update_app(&self, token: &str, params: UpdateAppParams) -> FutureResult<(), FlowyError>;
fn delete_app(&self, token: &str, params: AppIdPB) -> FutureResult<(), FlowyError>;
fn delete_app(&self, token: &str, params: AppIdPB) -> FutureResult<(), FlowyError>;
// Trash
fn create_trash(&self, token: &str, params: RepeatedTrashIdPB) -> FutureResult<(), FlowyError>;
// Trash
fn create_trash(&self, token: &str, params: RepeatedTrashIdPB) -> FutureResult<(), FlowyError>;
fn delete_trash(&self, token: &str, params: RepeatedTrashIdPB) -> FutureResult<(), FlowyError>;
fn delete_trash(&self, token: &str, params: RepeatedTrashIdPB) -> FutureResult<(), FlowyError>;
fn read_trash(&self, token: &str) -> FutureResult<Vec<TrashRevision>, FlowyError>;
fn read_trash(&self, token: &str) -> FutureResult<Vec<TrashRevision>, FlowyError>;
}

View File

@@ -14,9 +14,9 @@ pub mod protobuf;
mod util;

pub mod prelude {
  pub use crate::{errors::*, event_map::*};
}

pub mod errors {
  pub use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult};
}

View File

@@ -11,39 +11,39 @@
#[macro_export]
macro_rules! impl_def_and_def_mut {
  ($target:ident, $item: ident) => {
    impl std::ops::Deref for $target {
      type Target = Vec<$item>;

      fn deref(&self) -> &Self::Target {
        &self.items
      }
    }

    impl std::ops::DerefMut for $target {
      fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.items
      }
    }

    impl $target {
      #[allow(dead_code)]
      pub fn into_inner(&mut self) -> Vec<$item> {
        ::std::mem::take(&mut self.items)
      }

      #[allow(dead_code)]
      pub fn push(&mut self, item: $item) {
        if self.items.contains(&item) {
          log::error!("add duplicate item: {:?}", item);
          return;
        }

        self.items.push(item);
      }

      pub fn first_or_crash(&self) -> &$item {
        self.items.first().unwrap()
      }
    }
  };
}
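
This hunk is formatting only; the macro still generates the Deref, DerefMut, and helper methods that give the Repeated*PB wrapper types their Vec-like behavior throughout this commit. The sketch below is illustrative only: ItemPB and RepeatedItemPB are hypothetical names, not types from the codebase, and it assumes the macro and the log crate are in scope.

// Hypothetical wrapper, shown only to illustrate what the macro generates.
#[derive(Debug, PartialEq, Clone, Default)]
pub struct ItemPB {
  pub id: String,
}

#[derive(Debug, Default)]
pub struct RepeatedItemPB {
  pub items: Vec<ItemPB>,
}

impl_def_and_def_mut!(RepeatedItemPB, ItemPB);

fn demo() {
  let mut repeated = RepeatedItemPB::default();
  // push() comes from the macro and logs an error instead of inserting duplicates.
  repeated.push(ItemPB { id: "a".to_string() });
  // Deref to Vec<ItemPB> allows Vec methods such as len() to be called on the wrapper.
  assert_eq!(repeated.len(), 1);
  // into_inner() takes the items out, leaving the wrapper empty.
  let items = repeated.into_inner();
  assert_eq!(items.len(), 1);
}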

View File

@@ -2,26 +2,28 @@ use crate::entities::view::ViewDataFormatPB;
use crate::entities::{ViewLayoutTypePB, ViewPB};
use crate::services::folder_editor::FolderRevisionMergeable;
use crate::{
entities::workspace::RepeatedWorkspacePB,
errors::FlowyResult,
event_map::{FolderCouldServiceV1, WorkspaceDatabase, WorkspaceUser},
notification::{send_notification, FolderNotification},
services::{
folder_editor::FolderEditor, persistence::FolderPersistence, set_current_workspace, AppController,
TrashController, ViewController, WorkspaceController,
},
entities::workspace::RepeatedWorkspacePB,
errors::FlowyResult,
event_map::{FolderCouldServiceV1, WorkspaceDatabase, WorkspaceUser},
notification::{send_notification, FolderNotification},
services::{
folder_editor::FolderEditor, persistence::FolderPersistence, set_current_workspace,
AppController, TrashController, ViewController, WorkspaceController,
},
};
use bytes::Bytes;
use flowy_document::editor::initial_read_me;
use flowy_error::FlowyError;
use flowy_revision::{RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration, RevisionWebSocket};
use flowy_revision::{
RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration, RevisionWebSocket,
};
use folder_model::user_default;
use lazy_static::lazy_static;
use lib_infra::future::FutureResult;
use crate::services::clear_current_workspace;
use crate::services::persistence::rev_sqlite::{
SQLiteFolderRevisionPersistence, SQLiteFolderRevisionSnapshotPersistence,
SQLiteFolderRevisionPersistence, SQLiteFolderRevisionSnapshotPersistence,
};
use flowy_client_sync::client_folder::FolderPad;
use std::convert::TryFrom;
@@ -29,268 +31,288 @@ use std::{collections::HashMap, fmt::Formatter, sync::Arc};
use tokio::sync::RwLock as TokioRwLock;
use ws_model::ws_revision::ServerRevisionWSData;
lazy_static! {
static ref INIT_FOLDER_FLAG: TokioRwLock<HashMap<String, bool>> = TokioRwLock::new(HashMap::new());
static ref INIT_FOLDER_FLAG: TokioRwLock<HashMap<String, bool>> =
TokioRwLock::new(HashMap::new());
}
const FOLDER_ID: &str = "folder";
const FOLDER_ID_SPLIT: &str = ":";
#[derive(Clone)]
pub struct FolderId(String);
impl FolderId {
pub fn new(user_id: &str) -> Self {
Self(format!("{}{}{}", user_id, FOLDER_ID_SPLIT, FOLDER_ID))
}
pub fn new(user_id: &str) -> Self {
Self(format!("{}{}{}", user_id, FOLDER_ID_SPLIT, FOLDER_ID))
}
}
impl std::fmt::Display for FolderId {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str(FOLDER_ID)
}
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str(FOLDER_ID)
}
}
impl std::fmt::Debug for FolderId {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str(FOLDER_ID)
}
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str(FOLDER_ID)
}
}
impl AsRef<str> for FolderId {
fn as_ref(&self) -> &str {
&self.0
}
fn as_ref(&self) -> &str {
&self.0
}
}
pub struct FolderManager {
pub user: Arc<dyn WorkspaceUser>,
pub(crate) persistence: Arc<FolderPersistence>,
pub(crate) workspace_controller: Arc<WorkspaceController>,
pub(crate) app_controller: Arc<AppController>,
pub(crate) view_controller: Arc<ViewController>,
pub(crate) trash_controller: Arc<TrashController>,
web_socket: Arc<dyn RevisionWebSocket>,
folder_editor: Arc<TokioRwLock<Option<Arc<FolderEditor>>>>,
pub user: Arc<dyn WorkspaceUser>,
pub(crate) persistence: Arc<FolderPersistence>,
pub(crate) workspace_controller: Arc<WorkspaceController>,
pub(crate) app_controller: Arc<AppController>,
pub(crate) view_controller: Arc<ViewController>,
pub(crate) trash_controller: Arc<TrashController>,
web_socket: Arc<dyn RevisionWebSocket>,
folder_editor: Arc<TokioRwLock<Option<Arc<FolderEditor>>>>,
}
impl FolderManager {
pub async fn new(
user: Arc<dyn WorkspaceUser>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
database: Arc<dyn WorkspaceDatabase>,
data_processors: ViewDataProcessorMap,
web_socket: Arc<dyn RevisionWebSocket>,
) -> Self {
if let Ok(user_id) = user.user_id() {
// Reset the flag when the folder manager is initialized; otherwise the
// folder_editor will not be re-initialized after a Flutter hot reload.
INIT_FOLDER_FLAG.write().await.insert(user_id.to_owned(), false);
}
let folder_editor = Arc::new(TokioRwLock::new(None));
let persistence = Arc::new(FolderPersistence::new(database.clone(), folder_editor.clone()));
let trash_controller = Arc::new(TrashController::new(
persistence.clone(),
cloud_service.clone(),
user.clone(),
));
let view_controller = Arc::new(ViewController::new(
user.clone(),
persistence.clone(),
cloud_service.clone(),
trash_controller.clone(),
data_processors,
));
let app_controller = Arc::new(AppController::new(
user.clone(),
persistence.clone(),
trash_controller.clone(),
cloud_service.clone(),
));
let workspace_controller = Arc::new(WorkspaceController::new(
user.clone(),
persistence.clone(),
trash_controller.clone(),
cloud_service.clone(),
));
Self {
user,
persistence,
workspace_controller,
app_controller,
view_controller,
trash_controller,
web_socket,
folder_editor,
}
pub async fn new(
user: Arc<dyn WorkspaceUser>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
database: Arc<dyn WorkspaceDatabase>,
data_processors: ViewDataProcessorMap,
web_socket: Arc<dyn RevisionWebSocket>,
) -> Self {
if let Ok(user_id) = user.user_id() {
// Reset the flag when the folder manager is initialized; otherwise the
// folder_editor will not be re-initialized after a Flutter hot reload.
INIT_FOLDER_FLAG
.write()
.await
.insert(user_id.to_owned(), false);
}
// pub fn network_state_changed(&self, new_type: NetworkType) {
// match new_type {
// NetworkType::UnknownNetworkType => {},
// NetworkType::Wifi => {},
// NetworkType::Cell => {},
// NetworkType::Ethernet => {},
// }
// }
let folder_editor = Arc::new(TokioRwLock::new(None));
let persistence = Arc::new(FolderPersistence::new(
database.clone(),
folder_editor.clone(),
));
pub async fn did_receive_ws_data(&self, data: Bytes) {
let result = ServerRevisionWSData::try_from(data);
match result {
Ok(data) => match self.folder_editor.read().await.clone() {
None => {}
Some(editor) => match editor.receive_ws_data(data).await {
Ok(_) => {}
Err(e) => tracing::error!("Folder receive data error: {:?}", e),
},
},
Err(e) => {
tracing::error!("Folder ws data parser failed: {:?}", e);
}
}
let trash_controller = Arc::new(TrashController::new(
persistence.clone(),
cloud_service.clone(),
user.clone(),
));
let view_controller = Arc::new(ViewController::new(
user.clone(),
persistence.clone(),
cloud_service.clone(),
trash_controller.clone(),
data_processors,
));
let app_controller = Arc::new(AppController::new(
user.clone(),
persistence.clone(),
trash_controller.clone(),
cloud_service.clone(),
));
let workspace_controller = Arc::new(WorkspaceController::new(
user.clone(),
persistence.clone(),
trash_controller.clone(),
cloud_service.clone(),
));
Self {
user,
persistence,
workspace_controller,
app_controller,
view_controller,
trash_controller,
web_socket,
folder_editor,
}
}
/// Called immediately after the application launches with the user signed in or signed up.
#[tracing::instrument(level = "trace", skip(self), err)]
pub async fn initialize(&self, user_id: &str, token: &str) -> FlowyResult<()> {
let mut write_guard = INIT_FOLDER_FLAG.write().await;
if let Some(is_init) = write_guard.get(user_id) {
if *is_init {
return Ok(());
}
}
tracing::debug!("Initialize folder editor");
let folder_id = FolderId::new(user_id);
self.persistence.initialize(user_id, &folder_id).await?;
// pub fn network_state_changed(&self, new_type: NetworkType) {
// match new_type {
// NetworkType::UnknownNetworkType => {},
// NetworkType::Wifi => {},
// NetworkType::Cell => {},
// NetworkType::Ethernet => {},
// }
// }
let pool = self.persistence.db_pool()?;
let object_id = folder_id.as_ref();
let disk_cache = SQLiteFolderRevisionPersistence::new(user_id, pool.clone());
let configuration = RevisionPersistenceConfiguration::new(200, false);
let rev_persistence = RevisionPersistence::new(user_id, object_id, disk_cache, configuration);
let rev_compactor = FolderRevisionMergeable();
let snapshot_object_id = format!("folder:{}", object_id);
let snapshot_persistence = SQLiteFolderRevisionSnapshotPersistence::new(&snapshot_object_id, pool);
let rev_manager = RevisionManager::new(
user_id,
folder_id.as_ref(),
rev_persistence,
rev_compactor,
snapshot_persistence,
);
let folder_editor = FolderEditor::new(user_id, &folder_id, token, rev_manager, self.web_socket.clone()).await?;
*self.folder_editor.write().await = Some(Arc::new(folder_editor));
self.app_controller.initialize()?;
self.view_controller.initialize()?;
write_guard.insert(user_id.to_owned(), true);
Ok(())
pub async fn did_receive_ws_data(&self, data: Bytes) {
let result = ServerRevisionWSData::try_from(data);
match result {
Ok(data) => match self.folder_editor.read().await.clone() {
None => {},
Some(editor) => match editor.receive_ws_data(data).await {
Ok(_) => {},
Err(e) => tracing::error!("Folder receive data error: {:?}", e),
},
},
Err(e) => {
tracing::error!("Folder ws data parser failed: {:?}", e);
},
}
}
pub async fn initialize_with_new_user(
&self,
user_id: &str,
token: &str,
view_data_format: ViewDataFormatPB,
) -> FlowyResult<()> {
DefaultFolderBuilder::build(
token,
user_id,
self.persistence.clone(),
self.view_controller.clone(),
|| (view_data_format.clone(), Bytes::from(initial_read_me())),
)
.await?;
self.initialize(user_id, token).await
/// Called immediately after the application launches with the user signed in or signed up.
#[tracing::instrument(level = "trace", skip(self), err)]
pub async fn initialize(&self, user_id: &str, token: &str) -> FlowyResult<()> {
let mut write_guard = INIT_FOLDER_FLAG.write().await;
if let Some(is_init) = write_guard.get(user_id) {
if *is_init {
return Ok(());
}
}
tracing::debug!("Initialize folder editor");
let folder_id = FolderId::new(user_id);
self.persistence.initialize(user_id, &folder_id).await?;
/// Called when the current user logs out.
///
pub async fn clear(&self, user_id: &str) {
self.view_controller.clear_latest_view();
clear_current_workspace(user_id);
*self.folder_editor.write().await = None;
}
let pool = self.persistence.db_pool()?;
let object_id = folder_id.as_ref();
let disk_cache = SQLiteFolderRevisionPersistence::new(user_id, pool.clone());
let configuration = RevisionPersistenceConfiguration::new(200, false);
let rev_persistence = RevisionPersistence::new(user_id, object_id, disk_cache, configuration);
let rev_compactor = FolderRevisionMergeable();
let snapshot_object_id = format!("folder:{}", object_id);
let snapshot_persistence =
SQLiteFolderRevisionSnapshotPersistence::new(&snapshot_object_id, pool);
let rev_manager = RevisionManager::new(
user_id,
folder_id.as_ref(),
rev_persistence,
rev_compactor,
snapshot_persistence,
);
let folder_editor = FolderEditor::new(
user_id,
&folder_id,
token,
rev_manager,
self.web_socket.clone(),
)
.await?;
*self.folder_editor.write().await = Some(Arc::new(folder_editor));
self.app_controller.initialize()?;
self.view_controller.initialize()?;
write_guard.insert(user_id.to_owned(), true);
Ok(())
}
pub async fn initialize_with_new_user(
&self,
user_id: &str,
token: &str,
view_data_format: ViewDataFormatPB,
) -> FlowyResult<()> {
DefaultFolderBuilder::build(
token,
user_id,
self.persistence.clone(),
self.view_controller.clone(),
|| (view_data_format.clone(), Bytes::from(initial_read_me())),
)
.await?;
self.initialize(user_id, token).await
}
/// Called when the current user logs out.
///
pub async fn clear(&self, user_id: &str) {
self.view_controller.clear_latest_view();
clear_current_workspace(user_id);
*self.folder_editor.write().await = None;
}
}
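// Builds the default workspace for a brand-new user: creates the initial view(s) through the
// `ViewController`, persists the resulting `FolderPad`, and sends a `DidCreateWorkspace`
// notification.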
struct DefaultFolderBuilder();
impl DefaultFolderBuilder {
async fn build<F: Fn() -> (ViewDataFormatPB, Bytes)>(
token: &str,
user_id: &str,
persistence: Arc<FolderPersistence>,
view_controller: Arc<ViewController>,
create_view_fn: F,
) -> FlowyResult<()> {
let workspace_rev = user_default::create_default_workspace();
tracing::debug!("Create user:{} default workspace:{}", user_id, workspace_rev.id);
set_current_workspace(user_id, &workspace_rev.id);
for app in workspace_rev.apps.iter() {
for (index, view) in app.belongings.iter().enumerate() {
let (view_data_type, view_data) = create_view_fn();
if index == 0 {
let _ = view_controller.set_latest_view(&view.id);
let layout_type = ViewLayoutTypePB::from(view.layout.clone());
view_controller
.create_view(&view.id, view_data_type, layout_type, view_data)
.await?;
}
}
async fn build<F: Fn() -> (ViewDataFormatPB, Bytes)>(
token: &str,
user_id: &str,
persistence: Arc<FolderPersistence>,
view_controller: Arc<ViewController>,
create_view_fn: F,
) -> FlowyResult<()> {
let workspace_rev = user_default::create_default_workspace();
tracing::debug!(
"Create user:{} default workspace:{}",
user_id,
workspace_rev.id
);
set_current_workspace(user_id, &workspace_rev.id);
for app in workspace_rev.apps.iter() {
for (index, view) in app.belongings.iter().enumerate() {
let (view_data_type, view_data) = create_view_fn();
if index == 0 {
let _ = view_controller.set_latest_view(&view.id);
let layout_type = ViewLayoutTypePB::from(view.layout.clone());
view_controller
.create_view(&view.id, view_data_type, layout_type, view_data)
.await?;
}
let folder = FolderPad::new(vec![workspace_rev.clone()], vec![])?;
let folder_id = FolderId::new(user_id);
persistence.save_folder(user_id, &folder_id, folder).await?;
let repeated_workspace = RepeatedWorkspacePB {
items: vec![workspace_rev.into()],
};
send_notification(token, FolderNotification::DidCreateWorkspace)
.payload(repeated_workspace)
.send();
Ok(())
}
}
let folder = FolderPad::new(vec![workspace_rev.clone()], vec![])?;
let folder_id = FolderId::new(user_id);
persistence.save_folder(user_id, &folder_id, folder).await?;
let repeated_workspace = RepeatedWorkspacePB {
items: vec![workspace_rev.into()],
};
send_notification(token, FolderNotification::DidCreateWorkspace)
.payload(repeated_workspace)
.send();
Ok(())
}
}
#[cfg(feature = "flowy_unit_test")]
impl FolderManager {
pub async fn folder_editor(&self) -> Arc<FolderEditor> {
self.folder_editor.read().await.clone().unwrap()
}
pub async fn folder_editor(&self) -> Arc<FolderEditor> {
self.folder_editor.read().await.clone().unwrap()
}
}
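// Implemented by each view data backend; the folder looks a processor up by its
// `ViewDataFormatPB` (see `ViewDataProcessorMap` below) to create, close, and read view data.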
pub trait ViewDataProcessor {
fn create_view(
&self,
user_id: &str,
view_id: &str,
layout: ViewLayoutTypePB,
view_data: Bytes,
) -> FutureResult<(), FlowyError>;
fn create_view(
&self,
user_id: &str,
view_id: &str,
layout: ViewLayoutTypePB,
view_data: Bytes,
) -> FutureResult<(), FlowyError>;
fn close_view(&self, view_id: &str) -> FutureResult<(), FlowyError>;
fn close_view(&self, view_id: &str) -> FutureResult<(), FlowyError>;
fn get_view_data(&self, view: &ViewPB) -> FutureResult<Bytes, FlowyError>;
fn get_view_data(&self, view: &ViewPB) -> FutureResult<Bytes, FlowyError>;
fn create_default_view(
&self,
user_id: &str,
view_id: &str,
layout: ViewLayoutTypePB,
data_format: ViewDataFormatPB,
) -> FutureResult<Bytes, FlowyError>;
fn create_default_view(
&self,
user_id: &str,
view_id: &str,
layout: ViewLayoutTypePB,
data_format: ViewDataFormatPB,
) -> FutureResult<Bytes, FlowyError>;
fn create_view_with_data(
&self,
user_id: &str,
view_id: &str,
data: Vec<u8>,
layout: ViewLayoutTypePB,
) -> FutureResult<Bytes, FlowyError>;
fn create_view_with_data(
&self,
user_id: &str,
view_id: &str,
data: Vec<u8>,
layout: ViewLayoutTypePB,
) -> FutureResult<Bytes, FlowyError>;
fn data_types(&self) -> Vec<ViewDataFormatPB>;
fn data_types(&self) -> Vec<ViewDataFormatPB>;
}
pub type ViewDataProcessorMap = Arc<HashMap<ViewDataFormatPB, Arc<dyn ViewDataProcessor + Send + Sync>>>;
pub type ViewDataProcessorMap =
Arc<HashMap<ViewDataFormatPB, Arc<dyn ViewDataProcessor + Send + Sync>>>;

View File

@ -4,49 +4,49 @@ const OBSERVABLE_CATEGORY: &str = "Workspace";
#[derive(ProtoBuf_Enum, Debug)]
pub(crate) enum FolderNotification {
Unknown = 0,
/// Trigger after creating a workspace
DidCreateWorkspace = 1,
/// Trigger after deleting a workspace
DidDeleteWorkspace = 2,
/// Trigger after updating a workspace
DidUpdateWorkspace = 3,
/// Trigger when the number of apps of the workspace is changed
DidUpdateWorkspaceApps = 4,
/// Trigger when the settings of the workspace are changed, e.g. the latest visited view
DidUpdateWorkspaceSetting = 5,
/// Trigger when the properties of the app are changed, e.g. its name or description
DidUpdateApp = 20,
/// Trigger when the properties of the view are changed, e.g. its name or description
DidUpdateView = 30,
/// Trigger after deleting the view
DidDeleteView = 31,
/// Trigger when the view is restored from trash
DidRestoreView = 32,
/// Trigger after moving the view to trash
DidMoveViewToTrash = 33,
/// Trigger when the number of trash items is changed
DidUpdateTrash = 34,
Unknown = 0,
/// Trigger after creating a workspace
DidCreateWorkspace = 1,
/// Trigger after deleting a workspace
DidDeleteWorkspace = 2,
/// Trigger after updating a workspace
DidUpdateWorkspace = 3,
/// Trigger when the number of apps of the workspace is changed
DidUpdateWorkspaceApps = 4,
/// Trigger when the settings of the workspace are changed, e.g. the latest visited view
DidUpdateWorkspaceSetting = 5,
/// Trigger when the properties of the app are changed, e.g. its name or description
DidUpdateApp = 20,
/// Trigger when the properties of the view are changed, e.g. its name or description
DidUpdateView = 30,
/// Trigger after deleting the view
DidDeleteView = 31,
/// Trigger when the view is restored from trash
DidRestoreView = 32,
/// Trigger after moving the view to trash
DidMoveViewToTrash = 33,
/// Trigger when the number of trash items is changed
DidUpdateTrash = 34,
}
impl std::default::Default for FolderNotification {
fn default() -> Self {
FolderNotification::Unknown
}
fn default() -> Self {
FolderNotification::Unknown
}
}
impl std::convert::From<FolderNotification> for i32 {
fn from(notification: FolderNotification) -> Self {
notification as i32
}
fn from(notification: FolderNotification) -> Self {
notification as i32
}
}
#[tracing::instrument(level = "trace")]
pub(crate) fn send_notification(id: &str, ty: FolderNotification) -> NotificationBuilder {
NotificationBuilder::new(id, ty, OBSERVABLE_CATEGORY)
NotificationBuilder::new(id, ty, OBSERVABLE_CATEGORY)
}
#[tracing::instrument(level = "trace")]
pub(crate) fn send_anonymous_notification(ty: FolderNotification) -> NotificationBuilder {
NotificationBuilder::new("", ty, OBSERVABLE_CATEGORY)
NotificationBuilder::new("", ty, OBSERVABLE_CATEGORY)
}

View File

@ -1,15 +1,15 @@
use crate::{
entities::{
app::{AppPB, CreateAppParams, *},
trash::TrashType,
},
errors::*,
event_map::{FolderCouldServiceV1, WorkspaceUser},
notification::*,
services::{
persistence::{AppChangeset, FolderPersistence, FolderPersistenceTransaction},
TrashController, TrashEvent,
},
entities::{
app::{AppPB, CreateAppParams, *},
trash::TrashType,
},
errors::*,
event_map::{FolderCouldServiceV1, WorkspaceUser},
notification::*,
services::{
persistence::{AppChangeset, FolderPersistence, FolderPersistenceTransaction},
TrashController, TrashEvent,
},
};
use folder_model::AppRevision;
@ -17,219 +17,239 @@ use futures::{FutureExt, StreamExt};
use std::{collections::HashSet, sync::Arc};
pub(crate) struct AppController {
user: Arc<dyn WorkspaceUser>,
persistence: Arc<FolderPersistence>,
trash_controller: Arc<TrashController>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
}
impl AppController {
pub(crate) fn new(
user: Arc<dyn WorkspaceUser>,
persistence: Arc<FolderPersistence>,
trash_controller: Arc<TrashController>,
trash_can: Arc<TrashController>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
}
) -> Self {
Self {
user,
persistence,
trash_controller: trash_can,
cloud_service,
}
}
impl AppController {
pub(crate) fn new(
user: Arc<dyn WorkspaceUser>,
persistence: Arc<FolderPersistence>,
trash_can: Arc<TrashController>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
) -> Self {
Self {
user,
persistence,
trash_controller: trash_can,
cloud_service,
pub fn initialize(&self) -> Result<(), FlowyError> {
self.listen_trash_controller_event();
Ok(())
}
#[tracing::instrument(level = "debug", skip(self, params), fields(name = %params.name) err)]
pub(crate) async fn create_app_from_params(
&self,
params: CreateAppParams,
) -> Result<AppPB, FlowyError> {
let app = self.create_app_on_server(params).await?;
self.create_app_on_local(app).await
}
pub(crate) async fn create_app_on_local(&self, app: AppRevision) -> Result<AppPB, FlowyError> {
self
.persistence
.begin_transaction(|transaction| {
transaction.create_app(app.clone())?;
notify_apps_changed(
&app.workspace_id,
self.trash_controller.clone(),
&transaction,
)?;
Ok(())
})
.await?;
Ok(app.into())
}
pub(crate) async fn read_app(&self, params: AppIdPB) -> Result<Option<AppRevision>, FlowyError> {
let app = self
.persistence
.begin_transaction(|transaction| {
let app = transaction.read_app(&params.value)?;
let trash_ids = self.trash_controller.read_trash_ids(&transaction)?;
if trash_ids.contains(&app.id) {
return Ok(None);
}
}
Ok(Some(app))
})
.await?;
Ok(app)
}
pub fn initialize(&self) -> Result<(), FlowyError> {
self.listen_trash_controller_event();
Ok(())
}
pub(crate) async fn update_app(&self, params: UpdateAppParams) -> Result<(), FlowyError> {
let changeset = AppChangeset::new(params.clone());
let app_id = changeset.id.clone();
#[tracing::instrument(level = "debug", skip(self, params), fields(name = %params.name) err)]
pub(crate) async fn create_app_from_params(&self, params: CreateAppParams) -> Result<AppPB, FlowyError> {
let app = self.create_app_on_server(params).await?;
self.create_app_on_local(app).await
}
pub(crate) async fn create_app_on_local(&self, app: AppRevision) -> Result<AppPB, FlowyError> {
self.persistence
.begin_transaction(|transaction| {
transaction.create_app(app.clone())?;
notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?;
Ok(())
})
.await?;
Ok(app.into())
}
pub(crate) async fn read_app(&self, params: AppIdPB) -> Result<Option<AppRevision>, FlowyError> {
let app = self
.persistence
.begin_transaction(|transaction| {
let app = transaction.read_app(&params.value)?;
let trash_ids = self.trash_controller.read_trash_ids(&transaction)?;
if trash_ids.contains(&app.id) {
return Ok(None);
}
Ok(Some(app))
})
.await?;
let app: AppPB = self
.persistence
.begin_transaction(|transaction| {
transaction.update_app(changeset)?;
let app = transaction.read_app(&app_id)?;
Ok(app)
}
})
.await?
.into();
send_notification(&app_id, FolderNotification::DidUpdateApp)
.payload(app)
.send();
self.update_app_on_server(params)?;
Ok(())
}
pub(crate) async fn update_app(&self, params: UpdateAppParams) -> Result<(), FlowyError> {
let changeset = AppChangeset::new(params.clone());
let app_id = changeset.id.clone();
let app: AppPB = self
.persistence
.begin_transaction(|transaction| {
transaction.update_app(changeset)?;
let app = transaction.read_app(&app_id)?;
Ok(app)
})
.await?
.into();
send_notification(&app_id, FolderNotification::DidUpdateApp)
.payload(app)
.send();
self.update_app_on_server(params)?;
pub(crate) async fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> {
self
.persistence
.begin_transaction(|transaction| {
transaction.move_app(app_id, from, to)?;
let app = transaction.read_app(app_id)?;
notify_apps_changed(
&app.workspace_id,
self.trash_controller.clone(),
&transaction,
)?;
Ok(())
}
})
.await?;
Ok(())
}
pub(crate) async fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> {
self.persistence
.begin_transaction(|transaction| {
transaction.move_app(app_id, from, to)?;
let app = transaction.read_app(app_id)?;
notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?;
Ok(())
})
.await?;
Ok(())
}
pub(crate) async fn read_local_apps(&self, ids: Vec<String>) -> Result<Vec<AppRevision>, FlowyError> {
let app_revs = self
.persistence
.begin_transaction(|transaction| {
let mut apps = vec![];
for id in ids {
apps.push(transaction.read_app(&id)?);
}
Ok(apps)
})
.await?;
Ok(app_revs)
}
pub(crate) async fn read_local_apps(
&self,
ids: Vec<String>,
) -> Result<Vec<AppRevision>, FlowyError> {
let app_revs = self
.persistence
.begin_transaction(|transaction| {
let mut apps = vec![];
for id in ids {
apps.push(transaction.read_app(&id)?);
}
Ok(apps)
})
.await?;
Ok(app_revs)
}
}
impl AppController {
#[tracing::instrument(level = "trace", skip(self), err)]
async fn create_app_on_server(&self, params: CreateAppParams) -> Result<AppRevision, FlowyError> {
let token = self.user.token()?;
let app = self.cloud_service.create_app(&token, params).await?;
Ok(app)
}
#[tracing::instrument(level = "trace", skip(self), err)]
async fn create_app_on_server(&self, params: CreateAppParams) -> Result<AppRevision, FlowyError> {
let token = self.user.token()?;
let app = self.cloud_service.create_app(&token, params).await?;
Ok(app)
}
#[tracing::instrument(level = "trace", skip(self), err)]
fn update_app_on_server(&self, params: UpdateAppParams) -> Result<(), FlowyError> {
let token = self.user.token()?;
let server = self.cloud_service.clone();
tokio::spawn(async move {
match server.update_app(&token, params).await {
Ok(_) => {}
Err(e) => {
// TODO: retry?
log::error!("Update app failed: {:?}", e);
}
}
});
Ok(())
}
#[tracing::instrument(level = "trace", skip(self), err)]
fn update_app_on_server(&self, params: UpdateAppParams) -> Result<(), FlowyError> {
let token = self.user.token()?;
let server = self.cloud_service.clone();
tokio::spawn(async move {
match server.update_app(&token, params).await {
Ok(_) => {},
Err(e) => {
// TODO: retry?
log::error!("Update app failed: {:?}", e);
},
}
});
Ok(())
}
fn listen_trash_controller_event(&self) {
let mut rx = self.trash_controller.subscribe();
let persistence = self.persistence.clone();
let trash_controller = self.trash_controller.clone();
let _ = tokio::spawn(async move {
loop {
let mut stream = Box::pin(rx.recv().into_stream().filter_map(|result| async move {
match result {
Ok(event) => event.select(TrashType::TrashApp),
Err(_e) => None,
}
}));
if let Some(event) = stream.next().await {
handle_trash_event(persistence.clone(), trash_controller.clone(), event).await
}
}
});
}
fn listen_trash_controller_event(&self) {
let mut rx = self.trash_controller.subscribe();
let persistence = self.persistence.clone();
let trash_controller = self.trash_controller.clone();
let _ = tokio::spawn(async move {
loop {
let mut stream = Box::pin(rx.recv().into_stream().filter_map(|result| async move {
match result {
Ok(event) => event.select(TrashType::TrashApp),
Err(_e) => None,
}
}));
if let Some(event) = stream.next().await {
handle_trash_event(persistence.clone(), trash_controller.clone(), event).await
}
}
});
}
}
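// Handles app-related trash events: `NewTrash` and `Putback` only refresh the workspace's
// app list, while `Delete` also removes the app records before notifying the affected
// workspaces.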
#[tracing::instrument(level = "trace", skip(persistence, trash_controller))]
async fn handle_trash_event(
persistence: Arc<FolderPersistence>,
trash_controller: Arc<TrashController>,
event: TrashEvent,
persistence: Arc<FolderPersistence>,
trash_controller: Arc<TrashController>,
event: TrashEvent,
) {
match event {
TrashEvent::NewTrash(identifiers, ret) | TrashEvent::Putback(identifiers, ret) => {
let result = persistence
.begin_transaction(|transaction| {
for identifier in identifiers.items {
let app = transaction.read_app(&identifier.id)?;
notify_apps_changed(&app.workspace_id, trash_controller.clone(), &transaction)?;
}
Ok(())
})
.await;
let _ = ret.send(result).await;
}
TrashEvent::Delete(identifiers, ret) => {
let result = persistence
.begin_transaction(|transaction| {
let mut notify_ids = HashSet::new();
for identifier in identifiers.items {
let app = transaction.read_app(&identifier.id)?;
let _ = transaction.delete_app(&identifier.id)?;
notify_ids.insert(app.workspace_id);
}
match event {
TrashEvent::NewTrash(identifiers, ret) | TrashEvent::Putback(identifiers, ret) => {
let result = persistence
.begin_transaction(|transaction| {
for identifier in identifiers.items {
let app = transaction.read_app(&identifier.id)?;
notify_apps_changed(&app.workspace_id, trash_controller.clone(), &transaction)?;
}
Ok(())
})
.await;
let _ = ret.send(result).await;
},
TrashEvent::Delete(identifiers, ret) => {
let result = persistence
.begin_transaction(|transaction| {
let mut notify_ids = HashSet::new();
for identifier in identifiers.items {
let app = transaction.read_app(&identifier.id)?;
let _ = transaction.delete_app(&identifier.id)?;
notify_ids.insert(app.workspace_id);
}
for notify_id in notify_ids {
notify_apps_changed(&notify_id, trash_controller.clone(), &transaction)?;
}
Ok(())
})
.await;
let _ = ret.send(result).await;
}
}
for notify_id in notify_ids {
notify_apps_changed(&notify_id, trash_controller.clone(), &transaction)?;
}
Ok(())
})
.await;
let _ = ret.send(result).await;
},
}
}
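// Reads the workspace's non-trashed apps and broadcasts them in a
// `DidUpdateWorkspaceApps` notification.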
#[tracing::instrument(level = "debug", skip(workspace_id, trash_controller, transaction), err)]
#[tracing::instrument(
level = "debug",
skip(workspace_id, trash_controller, transaction),
err
)]
fn notify_apps_changed<'a>(
workspace_id: &str,
trash_controller: Arc<TrashController>,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
workspace_id: &str,
trash_controller: Arc<TrashController>,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> FlowyResult<()> {
let items = read_workspace_apps(workspace_id, trash_controller, transaction)?
.into_iter()
.map(|app_rev| app_rev.into())
.collect();
let repeated_app = RepeatedAppPB { items };
send_notification(workspace_id, FolderNotification::DidUpdateWorkspaceApps)
.payload(repeated_app)
.send();
Ok(())
let items = read_workspace_apps(workspace_id, trash_controller, transaction)?
.into_iter()
.map(|app_rev| app_rev.into())
.collect();
let repeated_app = RepeatedAppPB { items };
send_notification(workspace_id, FolderNotification::DidUpdateWorkspaceApps)
.payload(repeated_app)
.send();
Ok(())
}
pub fn read_workspace_apps<'a>(
workspace_id: &str,
trash_controller: Arc<TrashController>,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
workspace_id: &str,
trash_controller: Arc<TrashController>,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> Result<Vec<AppRevision>, FlowyError> {
let mut app_revs = transaction.read_workspace_apps(workspace_id)?;
let trash_ids = trash_controller.read_trash_ids(transaction)?;
app_revs.retain(|app| !trash_ids.contains(&app.id));
Ok(app_revs)
let mut app_revs = transaction.read_workspace_apps(workspace_id)?;
let trash_ids = trash_controller.read_trash_ids(transaction)?;
app_revs.retain(|app| !trash_ids.contains(&app.id));
Ok(app_revs)
}

View File

@ -1,60 +1,62 @@
use crate::{
entities::app::{AppIdPB, AppPB, CreateAppParams, CreateAppPayloadPB, UpdateAppParams, UpdateAppPayloadPB},
errors::FlowyError,
services::{AppController, TrashController, ViewController},
entities::app::{
AppIdPB, AppPB, CreateAppParams, CreateAppPayloadPB, UpdateAppParams, UpdateAppPayloadPB,
},
errors::FlowyError,
services::{AppController, TrashController, ViewController},
};
use folder_model::TrashRevision;
use lib_dispatch::prelude::{data_result, AFPluginData, AFPluginState, DataResult};
use std::{convert::TryInto, sync::Arc};
pub(crate) async fn create_app_handler(
data: AFPluginData<CreateAppPayloadPB>,
controller: AFPluginState<Arc<AppController>>,
data: AFPluginData<CreateAppPayloadPB>,
controller: AFPluginState<Arc<AppController>>,
) -> DataResult<AppPB, FlowyError> {
let params: CreateAppParams = data.into_inner().try_into()?;
let detail = controller.create_app_from_params(params).await?;
let params: CreateAppParams = data.into_inner().try_into()?;
let detail = controller.create_app_from_params(params).await?;
data_result(detail)
data_result(detail)
}
pub(crate) async fn delete_app_handler(
data: AFPluginData<AppIdPB>,
app_controller: AFPluginState<Arc<AppController>>,
trash_controller: AFPluginState<Arc<TrashController>>,
data: AFPluginData<AppIdPB>,
app_controller: AFPluginState<Arc<AppController>>,
trash_controller: AFPluginState<Arc<TrashController>>,
) -> Result<(), FlowyError> {
let params: AppIdPB = data.into_inner();
let trash = app_controller
.read_local_apps(vec![params.value])
.await?
.into_iter()
.map(|app_rev| app_rev.into())
.collect::<Vec<TrashRevision>>();
let params: AppIdPB = data.into_inner();
let trash = app_controller
.read_local_apps(vec![params.value])
.await?
.into_iter()
.map(|app_rev| app_rev.into())
.collect::<Vec<TrashRevision>>();
trash_controller.add(trash).await?;
Ok(())
trash_controller.add(trash).await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, controller))]
pub(crate) async fn update_app_handler(
data: AFPluginData<UpdateAppPayloadPB>,
controller: AFPluginState<Arc<AppController>>,
data: AFPluginData<UpdateAppPayloadPB>,
controller: AFPluginState<Arc<AppController>>,
) -> Result<(), FlowyError> {
let params: UpdateAppParams = data.into_inner().try_into()?;
controller.update_app(params).await?;
Ok(())
let params: UpdateAppParams = data.into_inner().try_into()?;
controller.update_app(params).await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, app_controller, view_controller), err)]
pub(crate) async fn read_app_handler(
data: AFPluginData<AppIdPB>,
app_controller: AFPluginState<Arc<AppController>>,
view_controller: AFPluginState<Arc<ViewController>>,
data: AFPluginData<AppIdPB>,
app_controller: AFPluginState<Arc<AppController>>,
view_controller: AFPluginState<Arc<ViewController>>,
) -> DataResult<AppPB, FlowyError> {
let params: AppIdPB = data.into_inner();
if let Some(mut app_rev) = app_controller.read_app(params.clone()).await? {
app_rev.belongings = view_controller.read_views_belong_to(&params.value).await?;
data_result(app_rev.into())
} else {
Err(FlowyError::record_not_found())
}
let params: AppIdPB = data.into_inner();
if let Some(mut app_rev) = app_controller.read_app(params.clone()).await? {
app_rev.belongings = view_controller.read_views_belong_to(&params.value).await?;
data_result(app_rev.into())
} else {
Err(FlowyError::record_not_found())
}
}

View File

@ -5,8 +5,8 @@ use flowy_client_sync::make_operations_from_revisions;
use flowy_client_sync::util::recover_operation_from_revisions;
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{
RevisionCloudService, RevisionManager, RevisionMergeable, RevisionObjectDeserializer, RevisionObjectSerializer,
RevisionWebSocket,
RevisionCloudService, RevisionManager, RevisionMergeable, RevisionObjectDeserializer,
RevisionObjectSerializer, RevisionWebSocket,
};
use flowy_sqlite::ConnectionPool;
use lib_infra::future::FutureResult;
@ -17,137 +17,154 @@ use std::sync::Arc;
use ws_model::ws_revision::ServerRevisionWSData;
pub struct FolderEditor {
#[allow(dead_code)]
user_id: String,
#[allow(dead_code)]
folder_id: FolderId,
pub(crate) folder: Arc<RwLock<FolderPad>>,
rev_manager: Arc<RevisionManager<Arc<ConnectionPool>>>,
#[cfg(feature = "sync")]
ws_manager: Arc<flowy_revision::RevisionWebSocketManager>,
#[allow(dead_code)]
user_id: String,
#[allow(dead_code)]
folder_id: FolderId,
pub(crate) folder: Arc<RwLock<FolderPad>>,
rev_manager: Arc<RevisionManager<Arc<ConnectionPool>>>,
#[cfg(feature = "sync")]
ws_manager: Arc<flowy_revision::RevisionWebSocketManager>,
}
impl FolderEditor {
#[allow(unused_variables)]
pub async fn new(
user_id: &str,
folder_id: &FolderId,
token: &str,
mut rev_manager: RevisionManager<Arc<ConnectionPool>>,
web_socket: Arc<dyn RevisionWebSocket>,
) -> FlowyResult<Self> {
let cloud = Arc::new(FolderRevisionCloudService {
token: token.to_string(),
});
let folder = Arc::new(RwLock::new(
rev_manager.initialize::<FolderRevisionSerde>(Some(cloud)).await?,
));
let rev_manager = Arc::new(rev_manager);
#[cfg(feature = "sync")]
let ws_manager = crate::services::web_socket::make_folder_ws_manager(
user_id,
folder_id.as_ref(),
rev_manager.clone(),
web_socket,
folder.clone(),
)
.await;
let user_id = user_id.to_owned();
let folder_id = folder_id.to_owned();
Ok(Self {
user_id,
folder_id,
folder,
rev_manager,
#[cfg(feature = "sync")]
ws_manager,
})
}
#[allow(unused_variables)]
pub async fn new(
user_id: &str,
folder_id: &FolderId,
token: &str,
mut rev_manager: RevisionManager<Arc<ConnectionPool>>,
web_socket: Arc<dyn RevisionWebSocket>,
) -> FlowyResult<Self> {
let cloud = Arc::new(FolderRevisionCloudService {
token: token.to_string(),
});
let folder = Arc::new(RwLock::new(
rev_manager
.initialize::<FolderRevisionSerde>(Some(cloud))
.await?,
));
let rev_manager = Arc::new(rev_manager);
#[cfg(feature = "sync")]
pub async fn receive_ws_data(&self, data: ServerRevisionWSData) -> FlowyResult<()> {
let _ = self.ws_manager.ws_passthrough_tx.send(data).await.map_err(|e| {
let err_msg = format!("{} passthrough error: {}", self.folder_id, e);
FlowyError::internal().context(err_msg)
})?;
let ws_manager = crate::services::web_socket::make_folder_ws_manager(
user_id,
folder_id.as_ref(),
rev_manager.clone(),
web_socket,
folder.clone(),
)
.await;
Ok(())
}
let user_id = user_id.to_owned();
let folder_id = folder_id.to_owned();
Ok(Self {
user_id,
folder_id,
folder,
rev_manager,
#[cfg(feature = "sync")]
ws_manager,
})
}
#[cfg(not(feature = "sync"))]
pub async fn receive_ws_data(&self, _data: ServerRevisionWSData) -> FlowyResult<()> {
Ok(())
}
#[cfg(feature = "sync")]
pub async fn receive_ws_data(&self, data: ServerRevisionWSData) -> FlowyResult<()> {
let _ = self
.ws_manager
.ws_passthrough_tx
.send(data)
.await
.map_err(|e| {
let err_msg = format!("{} passthrough error: {}", self.folder_id, e);
FlowyError::internal().context(err_msg)
})?;
pub(crate) fn apply_change(&self, change: FolderChangeset) -> FlowyResult<()> {
let FolderChangeset { operations: delta, md5 } = change;
let delta_data = delta.json_bytes();
let rev_manager = self.rev_manager.clone();
tokio::spawn(async move {
let _ = rev_manager.add_local_revision(delta_data, md5).await;
});
Ok(())
}
Ok(())
}
#[allow(dead_code)]
pub fn folder_json(&self) -> FlowyResult<String> {
let json = self.folder.read().to_json()?;
Ok(json)
}
#[cfg(not(feature = "sync"))]
pub async fn receive_ws_data(&self, _data: ServerRevisionWSData) -> FlowyResult<()> {
Ok(())
}
pub(crate) fn apply_change(&self, change: FolderChangeset) -> FlowyResult<()> {
let FolderChangeset {
operations: delta,
md5,
} = change;
let delta_data = delta.json_bytes();
let rev_manager = self.rev_manager.clone();
tokio::spawn(async move {
let _ = rev_manager.add_local_revision(delta_data, md5).await;
});
Ok(())
}
#[allow(dead_code)]
pub fn folder_json(&self) -> FlowyResult<String> {
let json = self.folder.read().to_json()?;
Ok(json)
}
}
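// Converts between raw folder revisions and the in-memory `FolderPad`: revisions are merged
// into `FolderOperations` when loading and combined back into operation bytes when persisting.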
struct FolderRevisionSerde();
impl RevisionObjectDeserializer for FolderRevisionSerde {
type Output = FolderPad;
type Output = FolderPad;
fn deserialize_revisions(_object_id: &str, revisions: Vec<Revision>) -> FlowyResult<Self::Output> {
let operations: FolderOperations = make_operations_from_revisions(revisions)?;
Ok(FolderPad::from_operations(operations)?)
}
fn deserialize_revisions(
_object_id: &str,
revisions: Vec<Revision>,
) -> FlowyResult<Self::Output> {
let operations: FolderOperations = make_operations_from_revisions(revisions)?;
Ok(FolderPad::from_operations(operations)?)
}
fn recover_from_revisions(revisions: Vec<Revision>) -> Option<(Self::Output, i64)> {
if let Some((operations, rev_id)) = recover_operation_from_revisions(revisions, |operations| {
FolderPad::from_operations(operations.clone()).is_ok()
}) {
if let Ok(pad) = FolderPad::from_operations(operations) {
return Some((pad, rev_id));
}
}
None
fn recover_from_revisions(revisions: Vec<Revision>) -> Option<(Self::Output, i64)> {
if let Some((operations, rev_id)) = recover_operation_from_revisions(revisions, |operations| {
FolderPad::from_operations(operations.clone()).is_ok()
}) {
if let Ok(pad) = FolderPad::from_operations(operations) {
return Some((pad, rev_id));
}
}
None
}
}
impl RevisionObjectSerializer for FolderRevisionSerde {
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
Ok(operations.json_bytes())
}
}
pub struct FolderRevisionMergeable();
impl RevisionMergeable for FolderRevisionMergeable {
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
FolderRevisionSerde::combine_revisions(revisions)
}
fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
FolderRevisionSerde::combine_revisions(revisions)
}
}
struct FolderRevisionCloudService {
#[allow(dead_code)]
token: String,
#[allow(dead_code)]
token: String,
}
impl RevisionCloudService for FolderRevisionCloudService {
#[tracing::instrument(level = "trace", skip(self))]
fn fetch_object(&self, _user_id: &str, _object_id: &str) -> FutureResult<Vec<Revision>, FlowyError> {
FutureResult::new(async move { Ok(vec![]) })
}
#[tracing::instrument(level = "trace", skip(self))]
fn fetch_object(
&self,
_user_id: &str,
_object_id: &str,
) -> FutureResult<Vec<Revision>, FlowyError> {
FutureResult::new(async move { Ok(vec![]) })
}
}
#[cfg(feature = "flowy_unit_test")]
impl FolderEditor {
pub fn rev_manager(&self) -> Arc<RevisionManager<Arc<ConnectionPool>>> {
self.rev_manager.clone()
}
pub fn rev_manager(&self) -> Arc<RevisionManager<Arc<ConnectionPool>>> {
self.rev_manager.clone()
}
}

View File

@ -1,7 +1,7 @@
use crate::manager::FolderId;
use crate::{
event_map::WorkspaceDatabase,
services::persistence::{AppTableSql, TrashTableSql, ViewTableSql, WorkspaceTableSql},
event_map::WorkspaceDatabase,
services::persistence::{AppTableSql, TrashTableSql, ViewTableSql, WorkspaceTableSql},
};
use bytes::Bytes;
use flowy_client_sync::client_folder::FolderPad;
@ -21,134 +21,137 @@ const V2_MIGRATION: &str = "FOLDER_V2_MIGRATION";
const V3_MIGRATION: &str = "FOLDER_V3_MIGRATION";
pub(crate) struct FolderMigration {
user_id: String,
database: Arc<dyn WorkspaceDatabase>,
user_id: String,
database: Arc<dyn WorkspaceDatabase>,
}
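// One-off folder migrations: v1 rebuilds the folder from the legacy SQL tables, v2 and v3
// reset the folder revision structure; each step is gated by a key-value flag so it runs at
// most once per user.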
impl FolderMigration {
pub fn new(user_id: &str, database: Arc<dyn WorkspaceDatabase>) -> Self {
Self {
user_id: user_id.to_owned(),
database,
}
pub fn new(user_id: &str, database: Arc<dyn WorkspaceDatabase>) -> Self {
Self {
user_id: user_id.to_owned(),
database,
}
}
pub fn run_v1_migration(&self) -> FlowyResult<Option<FolderPad>> {
let key = migration_flag_key(&self.user_id, V1_MIGRATION);
if KV::get_bool(&key) {
return Ok(None);
}
pub fn run_v1_migration(&self) -> FlowyResult<Option<FolderPad>> {
let key = migration_flag_key(&self.user_id, V1_MIGRATION);
if KV::get_bool(&key) {
return Ok(None);
let pool = self.database.db_pool()?;
let conn = &*pool.get()?;
let workspaces = conn.immediate_transaction::<_, FlowyError, _>(|| {
let mut workspaces = WorkspaceTableSql::read_workspaces(&self.user_id, None, conn)?
.into_iter()
.map(WorkspaceRevision::from)
.collect::<Vec<_>>();
for workspace in workspaces.iter_mut() {
let mut apps = AppTableSql::read_workspace_apps(&workspace.id, conn)?
.into_iter()
.map(AppRevision::from)
.collect::<Vec<_>>();
for app in apps.iter_mut() {
let views = ViewTableSql::read_views(&app.id, conn)?
.into_iter()
.map(ViewRevision::from)
.collect::<Vec<_>>();
app.belongings = views;
}
let pool = self.database.db_pool()?;
let conn = &*pool.get()?;
let workspaces = conn.immediate_transaction::<_, FlowyError, _>(|| {
let mut workspaces = WorkspaceTableSql::read_workspaces(&self.user_id, None, conn)?
.into_iter()
.map(WorkspaceRevision::from)
.collect::<Vec<_>>();
workspace.apps = apps;
}
Ok(workspaces)
})?;
for workspace in workspaces.iter_mut() {
let mut apps = AppTableSql::read_workspace_apps(&workspace.id, conn)?
.into_iter()
.map(AppRevision::from)
.collect::<Vec<_>>();
for app in apps.iter_mut() {
let views = ViewTableSql::read_views(&app.id, conn)?
.into_iter()
.map(ViewRevision::from)
.collect::<Vec<_>>();
app.belongings = views;
}
workspace.apps = apps;
}
Ok(workspaces)
})?;
if workspaces.is_empty() {
tracing::trace!("Run folder v1 migration, but workspace is empty");
KV::set_bool(&key, true);
return Ok(None);
}
let trash = conn.immediate_transaction::<_, FlowyError, _>(|| {
let trash = TrashTableSql::read_all(conn)?;
Ok(trash)
})?;
let folder = FolderPad::new(workspaces, trash)?;
KV::set_bool(&key, true);
tracing::info!("Run folder v1 migration");
Ok(Some(folder))
if workspaces.is_empty() {
tracing::trace!("Run folder v1 migration, but workspace is empty");
KV::set_bool(&key, true);
return Ok(None);
}
pub async fn run_v2_migration(&self, folder_id: &FolderId) -> FlowyResult<()> {
let key = migration_flag_key(&self.user_id, V2_MIGRATION);
if KV::get_bool(&key) {
return Ok(());
}
self.migration_folder_rev_struct(folder_id).await?;
KV::set_bool(&key, true);
// tracing::info!("Run folder v2 migration");
Ok(())
}
let trash = conn.immediate_transaction::<_, FlowyError, _>(|| {
let trash = TrashTableSql::read_all(conn)?;
Ok(trash)
})?;
pub async fn run_v3_migration(&self, folder_id: &FolderId) -> FlowyResult<()> {
let key = migration_flag_key(&self.user_id, V3_MIGRATION);
if KV::get_bool(&key) {
return Ok(());
}
self.migration_folder_rev_struct(folder_id).await?;
KV::set_bool(&key, true);
tracing::trace!("Run folder v3 migration");
Ok(())
}
let folder = FolderPad::new(workspaces, trash)?;
KV::set_bool(&key, true);
tracing::info!("Run folder v1 migration");
Ok(Some(folder))
}
pub async fn migration_folder_rev_struct(&self, folder_id: &FolderId) -> FlowyResult<()> {
let object = FolderRevisionResettable {
folder_id: folder_id.as_ref().to_owned(),
};
let pool = self.database.db_pool()?;
let disk_cache = SQLiteFolderRevisionPersistence::new(&self.user_id, pool);
let reset = RevisionStructReset::new(&self.user_id, object, Arc::new(disk_cache));
reset.run().await
pub async fn run_v2_migration(&self, folder_id: &FolderId) -> FlowyResult<()> {
let key = migration_flag_key(&self.user_id, V2_MIGRATION);
if KV::get_bool(&key) {
return Ok(());
}
self.migration_folder_rev_struct(folder_id).await?;
KV::set_bool(&key, true);
// tracing::info!("Run folder v2 migration");
Ok(())
}
pub async fn run_v3_migration(&self, folder_id: &FolderId) -> FlowyResult<()> {
let key = migration_flag_key(&self.user_id, V3_MIGRATION);
if KV::get_bool(&key) {
return Ok(());
}
self.migration_folder_rev_struct(folder_id).await?;
KV::set_bool(&key, true);
tracing::trace!("Run folder v3 migration");
Ok(())
}
pub async fn migration_folder_rev_struct(&self, folder_id: &FolderId) -> FlowyResult<()> {
let object = FolderRevisionResettable {
folder_id: folder_id.as_ref().to_owned(),
};
let pool = self.database.db_pool()?;
let disk_cache = SQLiteFolderRevisionPersistence::new(&self.user_id, pool);
let reset = RevisionStructReset::new(&self.user_id, object, Arc::new(disk_cache));
reset.run().await
}
}
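// Key used to mark a migration as done for a user: the md5 of the user id concatenated with
// the migration version string.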
fn migration_flag_key(user_id: &str, version: &str) -> String {
md5(format!("{}{}", user_id, version,))
md5(format!("{}{}", user_id, version,))
}
struct FolderRevisionResettable {
folder_id: String,
folder_id: String,
}
impl RevisionResettable for FolderRevisionResettable {
fn target_id(&self) -> &str {
&self.folder_id
}
fn target_id(&self) -> &str {
&self.folder_id
}
fn reset_data(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let pad = FolderPad::from_revisions(revisions)?;
let json = pad.to_json()?;
let bytes = FolderOperationsBuilder::new().insert(&json).build().json_bytes();
Ok(bytes)
}
fn reset_data(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
let pad = FolderPad::from_revisions(revisions)?;
let json = pad.to_json()?;
let bytes = FolderOperationsBuilder::new()
.insert(&json)
.build()
.json_bytes();
Ok(bytes)
}
fn default_target_rev_str(&self) -> FlowyResult<String> {
let folder = FolderRevision::default();
let json = make_folder_rev_json_str(&folder)?;
Ok(json)
}
fn default_target_rev_str(&self) -> FlowyResult<String> {
let folder = FolderRevision::default();
let json = make_folder_rev_json_str(&folder)?;
Ok(json)
}
fn read_record(&self) -> Option<String> {
KV::get_str(self.target_id())
}
fn read_record(&self) -> Option<String> {
KV::get_str(self.target_id())
}
fn set_record(&self, record: String) {
KV::set_str(self.target_id(), record);
}
fn set_record(&self, record: String) {
KV::set_str(self.target_id(), record);
}
}

View File

@ -5,9 +5,9 @@ mod version_2;
use crate::services::persistence::rev_sqlite::SQLiteFolderRevisionPersistence;
use crate::{
event_map::WorkspaceDatabase,
manager::FolderId,
services::{folder_editor::FolderEditor, persistence::migration::FolderMigration},
event_map::WorkspaceDatabase,
manager::FolderId,
services::{folder_editor::FolderEditor, persistence::migration::FolderMigration},
};
use flowy_client_sync::client_folder::{FolderOperationsBuilder, FolderPad};
use flowy_error::{FlowyError, FlowyResult};
@ -17,114 +17,135 @@ use folder_model::{AppRevision, TrashRevision, ViewRevision, WorkspaceRevision};
use revision_model::Revision;
use std::sync::Arc;
use tokio::sync::RwLock;
pub use version_1::{app_sql::*, trash_sql::*, v1_impl::V1Transaction, view_sql::*, workspace_sql::*};
pub use version_1::{
app_sql::*, trash_sql::*, v1_impl::V1Transaction, view_sql::*, workspace_sql::*,
};
pub trait FolderPersistenceTransaction {
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()>;
fn read_workspaces(&self, user_id: &str, workspace_id: Option<String>) -> FlowyResult<Vec<WorkspaceRevision>>;
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()>;
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()>;
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()>;
fn read_workspaces(
&self,
user_id: &str,
workspace_id: Option<String>,
) -> FlowyResult<Vec<WorkspaceRevision>>;
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()>;
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()>;
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()>;
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()>;
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision>;
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>>;
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision>;
fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()>;
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()>;
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()>;
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision>;
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>>;
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision>;
fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()>;
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()>;
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision>;
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>>;
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()>;
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision>;
fn move_view(&self, view_id: &str, from: usize, to: usize) -> FlowyResult<()>;
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()>;
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision>;
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>>;
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()>;
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision>;
fn move_view(&self, view_id: &str, from: usize, to: usize) -> FlowyResult<()>;
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()>;
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>>;
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()>;
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()>;
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>>;
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()>;
}
pub struct FolderPersistence {
database: Arc<dyn WorkspaceDatabase>,
folder_editor: Arc<RwLock<Option<Arc<FolderEditor>>>>,
database: Arc<dyn WorkspaceDatabase>,
folder_editor: Arc<RwLock<Option<Arc<FolderEditor>>>>,
}
impl FolderPersistence {
pub fn new(database: Arc<dyn WorkspaceDatabase>, folder_editor: Arc<RwLock<Option<Arc<FolderEditor>>>>) -> Self {
Self {
database,
folder_editor,
}
pub fn new(
database: Arc<dyn WorkspaceDatabase>,
folder_editor: Arc<RwLock<Option<Arc<FolderEditor>>>>,
) -> Self {
Self {
database,
folder_editor,
}
}
#[deprecated(
since = "0.0.3",
note = "please use `begin_transaction` instead, this interface will be removed in the future"
)]
#[allow(dead_code)]
pub fn begin_transaction_v_1<F, O>(&self, f: F) -> FlowyResult<O>
where
F: for<'a> FnOnce(Box<dyn FolderPersistenceTransaction + 'a>) -> FlowyResult<O>,
{
//[[immediate_transaction]]
// https://sqlite.org/lang_transaction.html
// IMMEDIATE causes the database connection to start a new write immediately,
// without waiting for a write statement. The BEGIN IMMEDIATE might fail
// with SQLITE_BUSY if another write transaction is already active on another
// database connection.
//
// EXCLUSIVE is similar to IMMEDIATE in that a write transaction is started
// immediately. EXCLUSIVE and IMMEDIATE are the same in WAL mode, but in
// other journaling modes, EXCLUSIVE prevents other database connections from
// reading the database while the transaction is underway.
let conn = self.database.db_connection()?;
conn.immediate_transaction::<_, FlowyError, _>(|| f(Box::new(V1Transaction(&conn))))
}
pub async fn begin_transaction<F, O>(&self, f: F) -> FlowyResult<O>
where
F: FnOnce(Arc<dyn FolderPersistenceTransaction>) -> FlowyResult<O>,
{
match self.folder_editor.read().await.clone() {
None => Err(
FlowyError::internal().context("FolderEditor should be initialized after the user logs in."),
),
Some(editor) => f(editor),
}
}
pub fn db_pool(&self) -> FlowyResult<Arc<ConnectionPool>> {
self.database.db_pool()
}
pub async fn initialize(&self, user_id: &str, folder_id: &FolderId) -> FlowyResult<()> {
let migrations = FolderMigration::new(user_id, self.database.clone());
if let Some(migrated_folder) = migrations.run_v1_migration()? {
self
.save_folder(user_id, folder_id, migrated_folder)
.await?;
}
#[deprecated(
since = "0.0.3",
note = "please use `begin_transaction` instead, this interface will be removed in the future"
)]
#[allow(dead_code)]
pub fn begin_transaction_v_1<F, O>(&self, f: F) -> FlowyResult<O>
where
F: for<'a> FnOnce(Box<dyn FolderPersistenceTransaction + 'a>) -> FlowyResult<O>,
{
//[[immediate_transaction]]
// https://sqlite.org/lang_transaction.html
// IMMEDIATE causes the database connection to start a new write immediately,
// without waiting for a write statement. The BEGIN IMMEDIATE might fail
// with SQLITE_BUSY if another write transaction is already active on another
// database connection.
//
// EXCLUSIVE is similar to IMMEDIATE in that a write transaction is started
// immediately. EXCLUSIVE and IMMEDIATE are the same in WAL mode, but in
// other journaling modes, EXCLUSIVE prevents other database connections from
// reading the database while the transaction is underway.
let conn = self.database.db_connection()?;
conn.immediate_transaction::<_, FlowyError, _>(|| f(Box::new(V1Transaction(&conn))))
}
migrations.run_v2_migration(folder_id).await?;
migrations.run_v3_migration(folder_id).await?;
Ok(())
}
pub async fn begin_transaction<F, O>(&self, f: F) -> FlowyResult<O>
where
F: FnOnce(Arc<dyn FolderPersistenceTransaction>) -> FlowyResult<O>,
{
match self.folder_editor.read().await.clone() {
None => Err(FlowyError::internal().context("FolderEditor should be initialized after the user logs in.")),
Some(editor) => f(editor),
}
}
pub async fn save_folder(
&self,
user_id: &str,
folder_id: &FolderId,
folder: FolderPad,
) -> FlowyResult<()> {
let pool = self.database.db_pool()?;
let json = folder.to_json()?;
let delta_data = FolderOperationsBuilder::new()
.insert(&json)
.build()
.json_bytes();
let revision = Revision::initial_revision(folder_id.as_ref(), delta_data);
let record = SyncRecord {
revision,
state: RevisionState::Sync,
write_to_disk: true,
};
pub fn db_pool(&self) -> FlowyResult<Arc<ConnectionPool>> {
self.database.db_pool()
}
pub async fn initialize(&self, user_id: &str, folder_id: &FolderId) -> FlowyResult<()> {
let migrations = FolderMigration::new(user_id, self.database.clone());
if let Some(migrated_folder) = migrations.run_v1_migration()? {
self.save_folder(user_id, folder_id, migrated_folder).await?;
}
migrations.run_v2_migration(folder_id).await?;
migrations.run_v3_migration(folder_id).await?;
Ok(())
}
pub async fn save_folder(&self, user_id: &str, folder_id: &FolderId, folder: FolderPad) -> FlowyResult<()> {
let pool = self.database.db_pool()?;
let json = folder.to_json()?;
let delta_data = FolderOperationsBuilder::new().insert(&json).build().json_bytes();
let revision = Revision::initial_revision(folder_id.as_ref(), delta_data);
let record = SyncRecord {
revision,
state: RevisionState::Sync,
write_to_disk: true,
};
let disk_cache = make_folder_revision_disk_cache(user_id, pool);
disk_cache.delete_and_insert_records(folder_id.as_ref(), None, vec![record])
}
let disk_cache = make_folder_revision_disk_cache(user_id, pool);
disk_cache.delete_and_insert_records(folder_id.as_ref(), None, vec![record])
}
}
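// Exposes the SQLite-backed folder revision persistence through the generic
// `RevisionDiskCache` interface used by `save_folder` above.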
pub fn make_folder_revision_disk_cache(
user_id: &str,
pool: Arc<ConnectionPool>,
user_id: &str,
pool: Arc<ConnectionPool>,
) -> Arc<dyn RevisionDiskCache<Arc<ConnectionPool>, Error = FlowyError>> {
Arc::new(SQLiteFolderRevisionPersistence::new(user_id, pool))
Arc::new(SQLiteFolderRevisionPersistence::new(user_id, pool))
}

View File

@ -3,260 +3,280 @@ use diesel::{sql_types::Integer, update, SqliteConnection};
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_revision_persistence::{RevisionChangeset, RevisionDiskCache, RevisionState, SyncRecord};
use flowy_sqlite::{
impl_sql_integer_expression, insert_or_ignore_into,
prelude::*,
schema::{rev_table, rev_table::dsl},
ConnectionPool,
impl_sql_integer_expression, insert_or_ignore_into,
prelude::*,
schema::{rev_table, rev_table::dsl},
ConnectionPool,
};
use lib_infra::util::md5;
use revision_model::{Revision, RevisionRange};
use std::sync::Arc;
pub struct SQLiteFolderRevisionPersistence {
user_id: String,
pub(crate) pool: Arc<ConnectionPool>,
user_id: String,
pub(crate) pool: Arc<ConnectionPool>,
}
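// Diesel-backed `RevisionDiskCache` implementation that stores folder revisions in the
// `rev_table` table.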
impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence {
type Error = FlowyError;
type Error = FlowyError;
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
FolderRevisionSql::create(revision_records, &conn)?;
Ok(())
}
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
FolderRevisionSql::create(revision_records, &conn)?;
Ok(())
}
fn get_connection(&self) -> Result<Arc<ConnectionPool>, Self::Error> {
Ok(self.pool.clone())
}
fn get_connection(&self) -> Result<Arc<ConnectionPool>, Self::Error> {
Ok(self.pool.clone())
}
fn read_revision_records(
&self,
object_id: &str,
rev_ids: Option<Vec<i64>>,
) -> Result<Vec<SyncRecord>, Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let records = FolderRevisionSql::read(&self.user_id, object_id, rev_ids, &conn)?;
Ok(records)
}
fn read_revision_records(
&self,
object_id: &str,
rev_ids: Option<Vec<i64>>,
) -> Result<Vec<SyncRecord>, Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let records = FolderRevisionSql::read(&self.user_id, object_id, rev_ids, &conn)?;
Ok(records)
}
fn read_revision_records_with_range(
&self,
object_id: &str,
range: &RevisionRange,
) -> Result<Vec<SyncRecord>, Self::Error> {
let conn = &*self.pool.get().map_err(internal_error)?;
let revisions = FolderRevisionSql::read_with_range(&self.user_id, object_id, range.clone(), conn)?;
Ok(revisions)
}
fn read_revision_records_with_range(
&self,
object_id: &str,
range: &RevisionRange,
) -> Result<Vec<SyncRecord>, Self::Error> {
let conn = &*self.pool.get().map_err(internal_error)?;
let revisions =
FolderRevisionSql::read_with_range(&self.user_id, object_id, range.clone(), conn)?;
Ok(revisions)
}
fn update_revision_record(&self, changesets: Vec<RevisionChangeset>) -> FlowyResult<()> {
let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
FolderRevisionSql::update(changeset, conn)?;
}
Ok(())
})?;
Ok(())
}
fn update_revision_record(&self, changesets: Vec<RevisionChangeset>) -> FlowyResult<()> {
let conn = &*self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
FolderRevisionSql::update(changeset, conn)?;
}
Ok(())
})?;
Ok(())
}
fn delete_revision_records(&self, object_id: &str, rev_ids: Option<Vec<i64>>) -> Result<(), Self::Error> {
let conn = &*self.pool.get().map_err(internal_error)?;
FolderRevisionSql::delete(object_id, rev_ids, conn)?;
Ok(())
}
fn delete_revision_records(
&self,
object_id: &str,
rev_ids: Option<Vec<i64>>,
) -> Result<(), Self::Error> {
let conn = &*self.pool.get().map_err(internal_error)?;
FolderRevisionSql::delete(object_id, rev_ids, conn)?;
Ok(())
}
fn delete_and_insert_records(
&self,
object_id: &str,
deleted_rev_ids: Option<Vec<i64>>,
inserted_records: Vec<SyncRecord>,
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
FolderRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
FolderRevisionSql::create(inserted_records, &conn)?;
Ok(())
})
}
fn delete_and_insert_records(
&self,
object_id: &str,
deleted_rev_ids: Option<Vec<i64>>,
inserted_records: Vec<SyncRecord>,
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
FolderRevisionSql::delete(object_id, deleted_rev_ids, &conn)?;
FolderRevisionSql::create(inserted_records, &conn)?;
Ok(())
})
}
}
impl SQLiteFolderRevisionPersistence {
pub fn new(user_id: &str, pool: Arc<ConnectionPool>) -> Self {
Self {
user_id: user_id.to_owned(),
pool,
}
pub fn new(user_id: &str, pool: Arc<ConnectionPool>) -> Self {
Self {
user_id: user_id.to_owned(),
pool,
}
}
}
struct FolderRevisionSql {}
impl FolderRevisionSql {
fn create(revision_records: Vec<SyncRecord>, conn: &SqliteConnection) -> Result<(), FlowyError> {
// Batch insert: https://diesel.rs/guides/all-about-inserts.html
fn create(revision_records: Vec<SyncRecord>, conn: &SqliteConnection) -> Result<(), FlowyError> {
// Batch insert: https://diesel.rs/guides/all-about-inserts.html
let records = revision_records
.into_iter()
.map(|record| {
tracing::trace!(
"[TextRevisionSql] create revision: {}:{:?}",
record.revision.object_id,
record.revision.rev_id
);
let rev_state: TextRevisionState = record.state.into();
(
dsl::doc_id.eq(record.revision.object_id),
dsl::base_rev_id.eq(record.revision.base_rev_id),
dsl::rev_id.eq(record.revision.rev_id),
dsl::data.eq(record.revision.bytes),
dsl::state.eq(rev_state),
dsl::ty.eq(RevTableType::Local),
)
})
.collect::<Vec<_>>();
let _ = insert_or_ignore_into(dsl::rev_table).values(&records).execute(conn)?;
Ok(())
}
fn update(changeset: RevisionChangeset, conn: &SqliteConnection) -> Result<(), FlowyError> {
let state: TextRevisionState = changeset.state.clone().into();
let filter = dsl::rev_table
.filter(dsl::rev_id.eq(changeset.rev_id))
.filter(dsl::doc_id.eq(changeset.object_id));
let _ = update(filter).set(dsl::state.eq(state)).execute(conn)?;
tracing::debug!(
"[TextRevisionSql] update revision:{} state:to {:?}",
changeset.rev_id,
changeset.state
let records = revision_records
.into_iter()
.map(|record| {
tracing::trace!(
"[TextRevisionSql] create revision: {}:{:?}",
record.revision.object_id,
record.revision.rev_id
);
Ok(())
let rev_state: TextRevisionState = record.state.into();
(
dsl::doc_id.eq(record.revision.object_id),
dsl::base_rev_id.eq(record.revision.base_rev_id),
dsl::rev_id.eq(record.revision.rev_id),
dsl::data.eq(record.revision.bytes),
dsl::state.eq(rev_state),
dsl::ty.eq(RevTableType::Local),
)
})
.collect::<Vec<_>>();
let _ = insert_or_ignore_into(dsl::rev_table)
.values(&records)
.execute(conn)?;
Ok(())
}
fn update(changeset: RevisionChangeset, conn: &SqliteConnection) -> Result<(), FlowyError> {
let state: TextRevisionState = changeset.state.clone().into();
let filter = dsl::rev_table
.filter(dsl::rev_id.eq(changeset.rev_id))
.filter(dsl::doc_id.eq(changeset.object_id));
let _ = update(filter).set(dsl::state.eq(state)).execute(conn)?;
tracing::debug!(
"[TextRevisionSql] update revision:{} state:to {:?}",
changeset.rev_id,
changeset.state
);
Ok(())
}
fn read(
user_id: &str,
object_id: &str,
rev_ids: Option<Vec<i64>>,
conn: &SqliteConnection,
) -> Result<Vec<SyncRecord>, FlowyError> {
let mut sql = dsl::rev_table
.filter(dsl::doc_id.eq(object_id))
.into_boxed();
if let Some(rev_ids) = rev_ids {
sql = sql.filter(dsl::rev_id.eq_any(rev_ids));
}
let rows = sql.order(dsl::rev_id.asc()).load::<RevisionTable>(conn)?;
let records = rows
.into_iter()
.map(|row| mk_revision_record_from_table(user_id, row))
.collect::<Vec<_>>();
Ok(records)
}
fn read_with_range(
user_id: &str,
object_id: &str,
range: RevisionRange,
conn: &SqliteConnection,
) -> Result<Vec<SyncRecord>, FlowyError> {
let rev_tables = dsl::rev_table
.filter(dsl::rev_id.ge(range.start))
.filter(dsl::rev_id.le(range.end))
.filter(dsl::doc_id.eq(object_id))
.order(dsl::rev_id.asc())
.load::<RevisionTable>(conn)?;
let revisions = rev_tables
.into_iter()
.map(|table| mk_revision_record_from_table(user_id, table))
.collect::<Vec<_>>();
Ok(revisions)
}
fn delete(
object_id: &str,
rev_ids: Option<Vec<i64>>,
conn: &SqliteConnection,
) -> Result<(), FlowyError> {
let mut sql = diesel::delete(dsl::rev_table).into_boxed();
sql = sql.filter(dsl::doc_id.eq(object_id));
if let Some(rev_ids) = rev_ids {
tracing::trace!(
"[TextRevisionSql] Delete revision: {}:{:?}",
object_id,
rev_ids
);
sql = sql.filter(dsl::rev_id.eq_any(rev_ids));
}
fn read(
user_id: &str,
object_id: &str,
rev_ids: Option<Vec<i64>>,
conn: &SqliteConnection,
) -> Result<Vec<SyncRecord>, FlowyError> {
let mut sql = dsl::rev_table.filter(dsl::doc_id.eq(object_id)).into_boxed();
if let Some(rev_ids) = rev_ids {
sql = sql.filter(dsl::rev_id.eq_any(rev_ids));
}
let rows = sql.order(dsl::rev_id.asc()).load::<RevisionTable>(conn)?;
let records = rows
.into_iter()
.map(|row| mk_revision_record_from_table(user_id, row))
.collect::<Vec<_>>();
Ok(records)
}
fn read_with_range(
user_id: &str,
object_id: &str,
range: RevisionRange,
conn: &SqliteConnection,
) -> Result<Vec<SyncRecord>, FlowyError> {
let rev_tables = dsl::rev_table
.filter(dsl::rev_id.ge(range.start))
.filter(dsl::rev_id.le(range.end))
.filter(dsl::doc_id.eq(object_id))
.order(dsl::rev_id.asc())
.load::<RevisionTable>(conn)?;
let revisions = rev_tables
.into_iter()
.map(|table| mk_revision_record_from_table(user_id, table))
.collect::<Vec<_>>();
Ok(revisions)
}
fn delete(object_id: &str, rev_ids: Option<Vec<i64>>, conn: &SqliteConnection) -> Result<(), FlowyError> {
let mut sql = diesel::delete(dsl::rev_table).into_boxed();
sql = sql.filter(dsl::doc_id.eq(object_id));
if let Some(rev_ids) = rev_ids {
tracing::trace!("[TextRevisionSql] Delete revision: {}:{:?}", object_id, rev_ids);
sql = sql.filter(dsl::rev_id.eq_any(rev_ids));
}
let affected_row = sql.execute(conn)?;
tracing::trace!("[TextRevisionSql] Delete {} rows", affected_row);
Ok(())
}
let affected_row = sql.execute(conn)?;
tracing::trace!("[TextRevisionSql] Delete {} rows", affected_row);
Ok(())
}
}
#[derive(PartialEq, Clone, Debug, Queryable, Identifiable, Insertable, Associations)]
#[table_name = "rev_table"]
struct RevisionTable {
id: i32,
doc_id: String,
base_rev_id: i64,
rev_id: i64,
data: Vec<u8>,
state: TextRevisionState,
ty: RevTableType, // Deprecated
id: i32,
doc_id: String,
base_rev_id: i64,
rev_id: i64,
data: Vec<u8>,
state: TextRevisionState,
ty: RevTableType, // Deprecated
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, FromSqlRow, AsExpression)]
#[repr(i32)]
#[sql_type = "Integer"]
enum TextRevisionState {
Sync = 0,
Ack = 1,
Sync = 0,
Ack = 1,
}
impl_sql_integer_expression!(TextRevisionState);
impl_rev_state_map!(TextRevisionState);
impl std::default::Default for TextRevisionState {
fn default() -> Self {
TextRevisionState::Sync
}
fn default() -> Self {
TextRevisionState::Sync
}
}
fn mk_revision_record_from_table(_user_id: &str, table: RevisionTable) -> SyncRecord {
let md5 = md5(&table.data);
let revision = Revision::new(
&table.doc_id,
table.base_rev_id,
table.rev_id,
Bytes::from(table.data),
md5,
);
SyncRecord {
revision,
state: table.state.into(),
write_to_disk: false,
}
let md5 = md5(&table.data);
let revision = Revision::new(
&table.doc_id,
table.base_rev_id,
table.rev_id,
Bytes::from(table.data),
md5,
);
SyncRecord {
revision,
state: table.state.into(),
write_to_disk: false,
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, FromSqlRow, AsExpression)]
#[repr(i32)]
#[sql_type = "Integer"]
pub enum RevTableType {
Local = 0,
Remote = 1,
Local = 0,
Remote = 1,
}
impl_sql_integer_expression!(RevTableType);
impl std::default::Default for RevTableType {
fn default() -> Self {
RevTableType::Local
}
fn default() -> Self {
RevTableType::Local
}
}
impl std::convert::From<i32> for RevTableType {
fn from(value: i32) -> Self {
match value {
0 => RevTableType::Local,
1 => RevTableType::Remote,
o => {
tracing::error!("Unsupported rev type {}, fallback to RevTableType::Local", o);
RevTableType::Local
}
}
fn from(value: i32) -> Self {
match value {
0 => RevTableType::Local,
1 => RevTableType::Remote,
o => {
tracing::error!(
"Unsupported rev type {}, fallback to RevTableType::Local",
o
);
RevTableType::Local
},
}
}
}
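FolderRevisionSql::create above uses the multi-row insert pattern from the linked diesel guide: build one tuple of column assignments per record, collect them into a Vec, and issue a single INSERT (the crate routes it through an insert-or-ignore helper re-exported by flowy_sqlite). A standalone sketch of the same pattern, assuming diesel 1.x with the sqlite feature and a hypothetical items table:

use diesel::prelude::*;
use diesel::table;

table! {
  items (id) {
    id -> Integer,
    name -> Text,
  }
}

fn insert_names(conn: &SqliteConnection, names: &[&str]) -> QueryResult<usize> {
  use self::items::dsl;
  // One tuple of column assignments per record; diesel turns the Vec into one multi-row INSERT.
  let rows = names
    .iter()
    .enumerate()
    .map(|(i, name)| (dsl::id.eq(i as i32), dsl::name.eq(*name)))
    .collect::<Vec<_>>();
  diesel::insert_into(dsl::items).values(&rows).execute(conn)
}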

View File

@@ -3,95 +3,95 @@ use bytes::Bytes;
use flowy_error::{internal_error, FlowyResult};
use flowy_revision::{RevisionSnapshotData, RevisionSnapshotPersistence};
use flowy_sqlite::{
prelude::*,
schema::{folder_rev_snapshot, folder_rev_snapshot::dsl},
ConnectionPool,
prelude::*,
schema::{folder_rev_snapshot, folder_rev_snapshot::dsl},
ConnectionPool,
};
use lib_infra::util::timestamp;
use std::sync::Arc;
pub struct SQLiteFolderRevisionSnapshotPersistence {
object_id: String,
pool: Arc<ConnectionPool>,
object_id: String,
pool: Arc<ConnectionPool>,
}
impl SQLiteFolderRevisionSnapshotPersistence {
pub fn new(object_id: &str, pool: Arc<ConnectionPool>) -> Self {
Self {
object_id: object_id.to_string(),
pool,
}
pub fn new(object_id: &str, pool: Arc<ConnectionPool>) -> Self {
Self {
object_id: object_id.to_string(),
pool,
}
}
fn gen_snapshot_id(&self, rev_id: i64) -> String {
format!("{}:{}", self.object_id, rev_id)
}
fn gen_snapshot_id(&self, rev_id: i64) -> String {
format!("{}:{}", self.object_id, rev_id)
}
}
impl RevisionSnapshotPersistence for SQLiteFolderRevisionSnapshotPersistence {
fn should_generate_snapshot_from_range(&self, start_rev_id: i64, current_rev_id: i64) -> bool {
(current_rev_id - start_rev_id) >= 2
}
fn should_generate_snapshot_from_range(&self, start_rev_id: i64, current_rev_id: i64) -> bool {
(current_rev_id - start_rev_id) >= 2
}
fn write_snapshot(&self, rev_id: i64, data: Vec<u8>) -> FlowyResult<()> {
let conn = self.pool.get().map_err(internal_error)?;
let snapshot_id = self.gen_snapshot_id(rev_id);
let timestamp = timestamp();
let record = (
dsl::snapshot_id.eq(&snapshot_id),
dsl::object_id.eq(&self.object_id),
dsl::rev_id.eq(rev_id),
dsl::base_rev_id.eq(rev_id),
dsl::timestamp.eq(timestamp),
dsl::data.eq(data),
);
let _ = insert_or_ignore_into(dsl::folder_rev_snapshot)
.values(record)
.execute(&*conn)?;
Ok(())
}
fn write_snapshot(&self, rev_id: i64, data: Vec<u8>) -> FlowyResult<()> {
let conn = self.pool.get().map_err(internal_error)?;
let snapshot_id = self.gen_snapshot_id(rev_id);
let timestamp = timestamp();
let record = (
dsl::snapshot_id.eq(&snapshot_id),
dsl::object_id.eq(&self.object_id),
dsl::rev_id.eq(rev_id),
dsl::base_rev_id.eq(rev_id),
dsl::timestamp.eq(timestamp),
dsl::data.eq(data),
);
let _ = insert_or_ignore_into(dsl::folder_rev_snapshot)
.values(record)
.execute(&*conn)?;
Ok(())
}
fn read_snapshot(&self, rev_id: i64) -> FlowyResult<Option<RevisionSnapshotData>> {
let conn = self.pool.get().map_err(internal_error)?;
let snapshot_id = self.gen_snapshot_id(rev_id);
let record = dsl::folder_rev_snapshot
.filter(dsl::snapshot_id.eq(&snapshot_id))
.first::<FolderSnapshotRecord>(&*conn)?;
fn read_snapshot(&self, rev_id: i64) -> FlowyResult<Option<RevisionSnapshotData>> {
let conn = self.pool.get().map_err(internal_error)?;
let snapshot_id = self.gen_snapshot_id(rev_id);
let record = dsl::folder_rev_snapshot
.filter(dsl::snapshot_id.eq(&snapshot_id))
.first::<FolderSnapshotRecord>(&*conn)?;
Ok(Some(record.into()))
}
Ok(Some(record.into()))
}
fn read_last_snapshot(&self) -> FlowyResult<Option<RevisionSnapshotData>> {
let conn = self.pool.get().map_err(internal_error)?;
let latest_record = dsl::folder_rev_snapshot
fn read_last_snapshot(&self) -> FlowyResult<Option<RevisionSnapshotData>> {
let conn = self.pool.get().map_err(internal_error)?;
let latest_record = dsl::folder_rev_snapshot
.filter(dsl::object_id.eq(&self.object_id))
.order(dsl::timestamp.desc())
// .select(max(dsl::rev_id))
// .select((dsl::id, dsl::object_id, dsl::rev_id, dsl::data))
.first::<FolderSnapshotRecord>(&*conn)?;
Ok(Some(latest_record.into()))
}
Ok(Some(latest_record.into()))
}
}
#[derive(PartialEq, Clone, Debug, Queryable, Identifiable, Insertable, Associations)]
#[table_name = "folder_rev_snapshot"]
#[primary_key("snapshot_id")]
struct FolderSnapshotRecord {
snapshot_id: String,
object_id: String,
rev_id: i64,
base_rev_id: i64,
timestamp: i64,
data: Vec<u8>,
snapshot_id: String,
object_id: String,
rev_id: i64,
base_rev_id: i64,
timestamp: i64,
data: Vec<u8>,
}
impl std::convert::From<FolderSnapshotRecord> for RevisionSnapshotData {
fn from(record: FolderSnapshotRecord) -> Self {
RevisionSnapshotData {
rev_id: record.rev_id,
base_rev_id: record.base_rev_id,
timestamp: record.timestamp,
data: Bytes::from(record.data),
}
fn from(record: FolderSnapshotRecord) -> Self {
RevisionSnapshotData {
rev_id: record.rev_id,
base_rev_id: record.base_rev_id,
timestamp: record.timestamp,
data: Bytes::from(record.data),
}
}
}
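should_generate_snapshot_from_range above is the entire snapshot policy: write a new snapshot once at least two revisions have accumulated beyond the revision the last snapshot was based on. The same check, pulled out with the threshold as a parameter (2 is the only value shown in this file; making it configurable is an assumption, not something the crate does here):

fn should_snapshot(start_rev_id: i64, current_rev_id: i64, threshold: i64) -> bool {
  // True once `threshold` or more revisions sit between the last snapshot and the current head.
  (current_rev_id - start_rev_id) >= threshold
}

// should_snapshot(10, 12, 2) == true; should_snapshot(10, 11, 2) == false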

View File

@@ -1,163 +1,169 @@
use crate::entities::{
app::UpdateAppParams,
trash::{TrashPB, TrashType},
app::UpdateAppParams,
trash::{TrashPB, TrashType},
};
use crate::{errors::FlowyError, services::persistence::version_1::workspace_sql::WorkspaceTable};
use flowy_sqlite::{
prelude::*,
schema::{app_table, app_table::dsl},
SqliteConnection,
prelude::*,
schema::{app_table, app_table::dsl},
SqliteConnection,
};
use folder_model::AppRevision;
pub struct AppTableSql();
impl AppTableSql {
pub(crate) fn create_app(app_rev: AppRevision, conn: &SqliteConnection) -> Result<(), FlowyError> {
let app_table = AppTable::new(app_rev);
match diesel_record_count!(app_table, &app_table.id, conn) {
0 => diesel_insert_table!(app_table, app_table.clone(), conn),
_ => {
let changeset = AppChangeset::from_table(app_table);
diesel_update_table!(app_table, changeset, conn)
}
}
Ok(())
pub(crate) fn create_app(
app_rev: AppRevision,
conn: &SqliteConnection,
) -> Result<(), FlowyError> {
let app_table = AppTable::new(app_rev);
match diesel_record_count!(app_table, &app_table.id, conn) {
0 => diesel_insert_table!(app_table, app_table.clone(), conn),
_ => {
let changeset = AppChangeset::from_table(app_table);
diesel_update_table!(app_table, changeset, conn)
},
}
Ok(())
}
pub(crate) fn update_app(changeset: AppChangeset, conn: &SqliteConnection) -> Result<(), FlowyError> {
diesel_update_table!(app_table, changeset, conn);
Ok(())
}
pub(crate) fn update_app(
changeset: AppChangeset,
conn: &SqliteConnection,
) -> Result<(), FlowyError> {
diesel_update_table!(app_table, changeset, conn);
Ok(())
}
pub(crate) fn read_app(app_id: &str, conn: &SqliteConnection) -> Result<AppTable, FlowyError> {
let filter = dsl::app_table.filter(app_table::id.eq(app_id)).into_boxed();
let app_table = filter.first::<AppTable>(conn)?;
Ok(app_table)
}
pub(crate) fn read_app(app_id: &str, conn: &SqliteConnection) -> Result<AppTable, FlowyError> {
let filter = dsl::app_table.filter(app_table::id.eq(app_id)).into_boxed();
let app_table = filter.first::<AppTable>(conn)?;
Ok(app_table)
}
pub(crate) fn read_workspace_apps(
workspace_id: &str,
conn: &SqliteConnection,
) -> Result<Vec<AppTable>, FlowyError> {
let app_table = dsl::app_table
.filter(app_table::workspace_id.eq(workspace_id))
.order(app_table::create_time.asc())
.load::<AppTable>(conn)?;
pub(crate) fn read_workspace_apps(
workspace_id: &str,
conn: &SqliteConnection,
) -> Result<Vec<AppTable>, FlowyError> {
let app_table = dsl::app_table
.filter(app_table::workspace_id.eq(workspace_id))
.order(app_table::create_time.asc())
.load::<AppTable>(conn)?;
Ok(app_table)
}
Ok(app_table)
}
pub(crate) fn delete_app(app_id: &str, conn: &SqliteConnection) -> Result<AppTable, FlowyError> {
let app_table = dsl::app_table
.filter(app_table::id.eq(app_id))
.first::<AppTable>(conn)?;
diesel_delete_table!(app_table, app_id, conn);
Ok(app_table)
}
pub(crate) fn delete_app(app_id: &str, conn: &SqliteConnection) -> Result<AppTable, FlowyError> {
let app_table = dsl::app_table
.filter(app_table::id.eq(app_id))
.first::<AppTable>(conn)?;
diesel_delete_table!(app_table, app_id, conn);
Ok(app_table)
}
// pub(crate) fn read_views_belong_to_app(
// &self,
// app_id: &str,
// ) -> Result<Vec<ViewTable>, FlowyError> {
// let conn = self.database.db_connection()?;
//
// let views = conn.immediate_transaction::<_, FlowyError, _>(|| {
// let app_table: AppTable = dsl::app_table
// .filter(app_table::id.eq(app_id))
// .first::<AppTable>(&*(conn))?;
// let views =
// ViewTable::belonging_to(&app_table).load::<ViewTable>(&*conn)?;
// Ok(views)
// })?;
//
// Ok(views)
// }
// pub(crate) fn read_views_belong_to_app(
// &self,
// app_id: &str,
// ) -> Result<Vec<ViewTable>, FlowyError> {
// let conn = self.database.db_connection()?;
//
// let views = conn.immediate_transaction::<_, FlowyError, _>(|| {
// let app_table: AppTable = dsl::app_table
// .filter(app_table::id.eq(app_id))
// .first::<AppTable>(&*(conn))?;
// let views =
// ViewTable::belonging_to(&app_table).load::<ViewTable>(&*conn)?;
// Ok(views)
// })?;
//
// Ok(views)
// }
}
#[derive(PartialEq, Clone, Debug, Queryable, Identifiable, Insertable, Associations)]
#[belongs_to(WorkspaceTable, foreign_key = "workspace_id")]
#[table_name = "app_table"]
pub(crate) struct AppTable {
pub id: String,
pub workspace_id: String, // equal to #[belongs_to(Workspace, foreign_key = "workspace_id")].
pub name: String,
pub desc: String,
pub color_style: Vec<u8>,
pub last_view_id: Option<String>,
pub modified_time: i64,
pub create_time: i64,
pub version: i64,
pub is_trash: bool,
pub id: String,
pub workspace_id: String, // equal to #[belongs_to(Workspace, foreign_key = "workspace_id")].
pub name: String,
pub desc: String,
pub color_style: Vec<u8>,
pub last_view_id: Option<String>,
pub modified_time: i64,
pub create_time: i64,
pub version: i64,
pub is_trash: bool,
}
impl AppTable {
pub fn new(app_rev: AppRevision) -> Self {
Self {
id: app_rev.id,
workspace_id: app_rev.workspace_id,
name: app_rev.name,
desc: app_rev.desc,
color_style: Default::default(),
last_view_id: None,
modified_time: app_rev.modified_time,
create_time: app_rev.create_time,
version: 0,
is_trash: false,
}
pub fn new(app_rev: AppRevision) -> Self {
Self {
id: app_rev.id,
workspace_id: app_rev.workspace_id,
name: app_rev.name,
desc: app_rev.desc,
color_style: Default::default(),
last_view_id: None,
modified_time: app_rev.modified_time,
create_time: app_rev.create_time,
version: 0,
is_trash: false,
}
}
}
impl std::convert::From<AppTable> for TrashPB {
fn from(table: AppTable) -> Self {
TrashPB {
id: table.id,
name: table.name,
modified_time: table.modified_time,
create_time: table.create_time,
ty: TrashType::TrashApp,
}
fn from(table: AppTable) -> Self {
TrashPB {
id: table.id,
name: table.name,
modified_time: table.modified_time,
create_time: table.create_time,
ty: TrashType::TrashApp,
}
}
}
#[derive(AsChangeset, Identifiable, Default, Debug)]
#[table_name = "app_table"]
pub struct AppChangeset {
pub id: String,
pub name: Option<String>,
pub desc: Option<String>,
pub is_trash: Option<bool>,
pub id: String,
pub name: Option<String>,
pub desc: Option<String>,
pub is_trash: Option<bool>,
}
impl AppChangeset {
pub(crate) fn new(params: UpdateAppParams) -> Self {
AppChangeset {
id: params.app_id,
name: params.name,
desc: params.desc,
is_trash: params.is_trash,
}
pub(crate) fn new(params: UpdateAppParams) -> Self {
AppChangeset {
id: params.app_id,
name: params.name,
desc: params.desc,
is_trash: params.is_trash,
}
}
pub(crate) fn from_table(table: AppTable) -> Self {
AppChangeset {
id: table.id,
name: Some(table.name),
desc: Some(table.desc),
is_trash: Some(table.is_trash),
}
pub(crate) fn from_table(table: AppTable) -> Self {
AppChangeset {
id: table.id,
name: Some(table.name),
desc: Some(table.desc),
is_trash: Some(table.is_trash),
}
}
}
impl std::convert::From<AppTable> for AppRevision {
fn from(table: AppTable) -> Self {
AppRevision {
id: table.id,
workspace_id: table.workspace_id,
name: table.name,
desc: table.desc,
belongings: vec![],
version: table.version,
modified_time: table.modified_time,
create_time: table.create_time,
}
fn from(table: AppTable) -> Self {
AppRevision {
id: table.id,
workspace_id: table.workspace_id,
name: table.name,
desc: table.desc,
belongings: vec![],
version: table.version,
modified_time: table.modified_time,
create_time: table.create_time,
}
}
}
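AppTableSql::create_app above is an upsert spelled out by hand: diesel_record_count! decides between diesel_insert_table! and diesel_update_table!. A plain-diesel sketch of that count-then-insert-or-update flow against a hypothetical apps table (diesel 1.x assumed; the project macros themselves are not reproduced):

use diesel::prelude::*;
use diesel::table;

table! {
  apps (id) {
    id -> Text,
    name -> Text,
  }
}

fn upsert_app(conn: &SqliteConnection, id: &str, name: &str) -> QueryResult<()> {
  use self::apps::dsl;
  // Count matching rows first, then branch into an insert or an update.
  let existing: i64 = dsl::apps.filter(dsl::id.eq(id)).count().get_result(conn)?;
  if existing == 0 {
    diesel::insert_into(dsl::apps)
      .values((dsl::id.eq(id), dsl::name.eq(name)))
      .execute(conn)?;
  } else {
    diesel::update(dsl::apps.filter(dsl::id.eq(id)))
      .set(dsl::name.eq(name))
      .execute(conn)?;
  }
  Ok(())
}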

View File

@@ -1,65 +1,68 @@
use crate::errors::FlowyError;
use diesel::sql_types::Integer;
use flowy_sqlite::{
prelude::*,
schema::{trash_table, trash_table::dsl},
SqliteConnection,
prelude::*,
schema::{trash_table, trash_table::dsl},
SqliteConnection,
};
use folder_model::{TrashRevision, TrashTypeRevision};
pub struct TrashTableSql();
impl TrashTableSql {
pub(crate) fn create_trash(trashes: Vec<TrashRevision>, conn: &SqliteConnection) -> Result<(), FlowyError> {
for trash_rev in trashes {
let trash_table: TrashTable = trash_rev.into();
match diesel_record_count!(trash_table, &trash_table.id, conn) {
0 => diesel_insert_table!(trash_table, trash_table.clone(), conn),
_ => {
let changeset = TrashChangeset::from(trash_table);
diesel_update_table!(trash_table, changeset, conn)
}
}
}
Ok(())
pub(crate) fn create_trash(
trashes: Vec<TrashRevision>,
conn: &SqliteConnection,
) -> Result<(), FlowyError> {
for trash_rev in trashes {
let trash_table: TrashTable = trash_rev.into();
match diesel_record_count!(trash_table, &trash_table.id, conn) {
0 => diesel_insert_table!(trash_table, trash_table.clone(), conn),
_ => {
let changeset = TrashChangeset::from(trash_table);
diesel_update_table!(trash_table, changeset, conn)
},
}
}
pub(crate) fn read_all(conn: &SqliteConnection) -> Result<Vec<TrashRevision>, FlowyError> {
let trash_tables = dsl::trash_table.load::<TrashTable>(conn)?;
let items = trash_tables
.into_iter()
.map(TrashRevision::from)
.collect::<Vec<TrashRevision>>();
Ok(items)
}
Ok(())
}
pub(crate) fn delete_all(conn: &SqliteConnection) -> Result<(), FlowyError> {
let _ = diesel::delete(dsl::trash_table).execute(conn)?;
Ok(())
}
pub(crate) fn read_all(conn: &SqliteConnection) -> Result<Vec<TrashRevision>, FlowyError> {
let trash_tables = dsl::trash_table.load::<TrashTable>(conn)?;
let items = trash_tables
.into_iter()
.map(TrashRevision::from)
.collect::<Vec<TrashRevision>>();
Ok(items)
}
pub(crate) fn read(trash_id: &str, conn: &SqliteConnection) -> Result<TrashTable, FlowyError> {
let trash_table = dsl::trash_table
.filter(trash_table::id.eq(trash_id))
.first::<TrashTable>(conn)?;
Ok(trash_table)
}
pub(crate) fn delete_all(conn: &SqliteConnection) -> Result<(), FlowyError> {
let _ = diesel::delete(dsl::trash_table).execute(conn)?;
Ok(())
}
pub(crate) fn delete_trash(trash_id: &str, conn: &SqliteConnection) -> Result<(), FlowyError> {
diesel_delete_table!(trash_table, trash_id, conn);
Ok(())
}
pub(crate) fn read(trash_id: &str, conn: &SqliteConnection) -> Result<TrashTable, FlowyError> {
let trash_table = dsl::trash_table
.filter(trash_table::id.eq(trash_id))
.first::<TrashTable>(conn)?;
Ok(trash_table)
}
pub(crate) fn delete_trash(trash_id: &str, conn: &SqliteConnection) -> Result<(), FlowyError> {
diesel_delete_table!(trash_table, trash_id, conn);
Ok(())
}
}
#[derive(PartialEq, Clone, Debug, Queryable, Identifiable, Insertable, Associations)]
#[table_name = "trash_table"]
pub(crate) struct TrashTable {
pub id: String,
pub name: String,
pub desc: String,
pub modified_time: i64,
pub create_time: i64,
pub ty: SqlTrashType,
pub id: String,
pub name: String,
pub desc: String,
pub modified_time: i64,
pub create_time: i64,
pub ty: SqlTrashType,
}
// impl std::convert::From<TrashTable> for Trash {
// fn from(table: TrashTable) -> Self {
@@ -74,86 +77,86 @@ pub(crate) struct TrashTable {
// }
//
impl std::convert::From<TrashTable> for TrashRevision {
fn from(trash: TrashTable) -> Self {
TrashRevision {
id: trash.id,
name: trash.name,
modified_time: trash.modified_time,
create_time: trash.create_time,
ty: trash.ty.into(),
}
fn from(trash: TrashTable) -> Self {
TrashRevision {
id: trash.id,
name: trash.name,
modified_time: trash.modified_time,
create_time: trash.create_time,
ty: trash.ty.into(),
}
}
}
impl std::convert::From<TrashRevision> for TrashTable {
fn from(trash_rev: TrashRevision) -> Self {
TrashTable {
id: trash_rev.id,
name: trash_rev.name,
desc: "".to_string(),
modified_time: trash_rev.modified_time,
create_time: trash_rev.create_time,
ty: trash_rev.ty.into(),
}
fn from(trash_rev: TrashRevision) -> Self {
TrashTable {
id: trash_rev.id,
name: trash_rev.name,
desc: "".to_string(),
modified_time: trash_rev.modified_time,
create_time: trash_rev.create_time,
ty: trash_rev.ty.into(),
}
}
}
#[derive(AsChangeset, Identifiable, Clone, Default, Debug)]
#[table_name = "trash_table"]
pub(crate) struct TrashChangeset {
pub id: String,
pub name: Option<String>,
pub modified_time: i64,
pub id: String,
pub name: Option<String>,
pub modified_time: i64,
}
impl std::convert::From<TrashTable> for TrashChangeset {
fn from(trash: TrashTable) -> Self {
TrashChangeset {
id: trash.id,
name: Some(trash.name),
modified_time: trash.modified_time,
}
fn from(trash: TrashTable) -> Self {
TrashChangeset {
id: trash.id,
name: Some(trash.name),
modified_time: trash.modified_time,
}
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, FromSqlRow, AsExpression)]
#[repr(i32)]
#[sql_type = "Integer"]
pub(crate) enum SqlTrashType {
Unknown = 0,
View = 1,
App = 2,
Unknown = 0,
View = 1,
App = 2,
}
impl std::convert::From<i32> for SqlTrashType {
fn from(value: i32) -> Self {
match value {
0 => SqlTrashType::Unknown,
1 => SqlTrashType::View,
2 => SqlTrashType::App,
_o => SqlTrashType::Unknown,
}
fn from(value: i32) -> Self {
match value {
0 => SqlTrashType::Unknown,
1 => SqlTrashType::View,
2 => SqlTrashType::App,
_o => SqlTrashType::Unknown,
}
}
}
impl_sql_integer_expression!(SqlTrashType);
impl std::convert::From<SqlTrashType> for TrashTypeRevision {
fn from(ty: SqlTrashType) -> Self {
match ty {
SqlTrashType::Unknown => TrashTypeRevision::Unknown,
SqlTrashType::View => TrashTypeRevision::TrashView,
SqlTrashType::App => TrashTypeRevision::TrashApp,
}
fn from(ty: SqlTrashType) -> Self {
match ty {
SqlTrashType::Unknown => TrashTypeRevision::Unknown,
SqlTrashType::View => TrashTypeRevision::TrashView,
SqlTrashType::App => TrashTypeRevision::TrashApp,
}
}
}
impl std::convert::From<TrashTypeRevision> for SqlTrashType {
fn from(ty: TrashTypeRevision) -> Self {
match ty {
TrashTypeRevision::Unknown => SqlTrashType::Unknown,
TrashTypeRevision::TrashView => SqlTrashType::View,
TrashTypeRevision::TrashApp => SqlTrashType::App,
}
fn from(ty: TrashTypeRevision) -> Self {
match ty {
TrashTypeRevision::Unknown => SqlTrashType::Unknown,
TrashTypeRevision::TrashView => SqlTrashType::View,
TrashTypeRevision::TrashApp => SqlTrashType::App,
}
}
}
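SqlTrashType above (and SqlViewDataFormat in the view file below) persists an enum as an Integer column and maps unrecognised values back to a safe default instead of failing. The mapping in isolation, without the impl_sql_integer_expression! plumbing:

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(i32)]
enum Kind {
  Unknown = 0,
  View = 1,
  App = 2,
}

impl From<i32> for Kind {
  fn from(value: i32) -> Self {
    match value {
      1 => Kind::View,
      2 => Kind::App,
      // Anything unrecognised (including 0) degrades to Unknown rather than panicking.
      _ => Kind::Unknown,
    }
  }
}

fn main() {
  assert_eq!(Kind::from(2), Kind::App);
  assert_eq!(Kind::from(42), Kind::Unknown);
}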

View File

@@ -1,10 +1,10 @@
use crate::services::persistence::{
version_1::{
app_sql::{AppChangeset, AppTableSql},
view_sql::{ViewChangeset, ViewTableSql},
workspace_sql::{WorkspaceChangeset, WorkspaceTableSql},
},
FolderPersistenceTransaction, TrashTableSql,
version_1::{
app_sql::{AppChangeset, AppTableSql},
view_sql::{ViewChangeset, ViewTableSql},
workspace_sql::{WorkspaceChangeset, WorkspaceTableSql},
},
FolderPersistenceTransaction, TrashTableSql,
};
use flowy_error::FlowyResult;
use flowy_sqlite::DBConnection;
@@ -14,192 +14,209 @@ use folder_model::{AppRevision, TrashRevision, ViewRevision, WorkspaceRevision};
pub struct V1Transaction<'a>(pub &'a DBConnection);
impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
WorkspaceTableSql::create_workspace(user_id, workspace_rev, self.0)?;
Ok(())
}
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
WorkspaceTableSql::create_workspace(user_id, workspace_rev, self.0)?;
Ok(())
}
fn read_workspaces(&self, user_id: &str, workspace_id: Option<String>) -> FlowyResult<Vec<WorkspaceRevision>> {
let tables = WorkspaceTableSql::read_workspaces(user_id, workspace_id, self.0)?;
let workspaces = tables.into_iter().map(WorkspaceRevision::from).collect::<Vec<_>>();
Ok(workspaces)
}
fn read_workspaces(
&self,
user_id: &str,
workspace_id: Option<String>,
) -> FlowyResult<Vec<WorkspaceRevision>> {
let tables = WorkspaceTableSql::read_workspaces(user_id, workspace_id, self.0)?;
let workspaces = tables
.into_iter()
.map(WorkspaceRevision::from)
.collect::<Vec<_>>();
Ok(workspaces)
}
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()> {
WorkspaceTableSql::update_workspace(changeset, self.0)
}
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()> {
WorkspaceTableSql::update_workspace(changeset, self.0)
}
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()> {
WorkspaceTableSql::delete_workspace(workspace_id, self.0)
}
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()> {
WorkspaceTableSql::delete_workspace(workspace_id, self.0)
}
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
AppTableSql::create_app(app_rev, self.0)?;
Ok(())
}
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
AppTableSql::create_app(app_rev, self.0)?;
Ok(())
}
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
AppTableSql::update_app(changeset, self.0)?;
Ok(())
}
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
AppTableSql::update_app(changeset, self.0)?;
Ok(())
}
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app_revision: AppRevision = AppTableSql::read_app(app_id, self.0)?.into();
Ok(app_revision)
}
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app_revision: AppRevision = AppTableSql::read_app(app_id, self.0)?.into();
Ok(app_revision)
}
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>> {
let tables = AppTableSql::read_workspace_apps(workspace_id, self.0)?;
let apps = tables.into_iter().map(AppRevision::from).collect::<Vec<_>>();
Ok(apps)
}
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>> {
let tables = AppTableSql::read_workspace_apps(workspace_id, self.0)?;
let apps = tables
.into_iter()
.map(AppRevision::from)
.collect::<Vec<_>>();
Ok(apps)
}
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app_revision: AppRevision = AppTableSql::delete_app(app_id, self.0)?.into();
Ok(app_revision)
}
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app_revision: AppRevision = AppTableSql::delete_app(app_id, self.0)?.into();
Ok(app_revision)
}
fn move_app(&self, _app_id: &str, _from: usize, _to: usize) -> FlowyResult<()> {
Ok(())
}
fn move_app(&self, _app_id: &str, _from: usize, _to: usize) -> FlowyResult<()> {
Ok(())
}
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
ViewTableSql::create_view(view_rev, self.0)?;
Ok(())
}
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
ViewTableSql::create_view(view_rev, self.0)?;
Ok(())
}
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view_revision: ViewRevision = ViewTableSql::read_view(view_id, self.0)?.into();
Ok(view_revision)
}
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view_revision: ViewRevision = ViewTableSql::read_view(view_id, self.0)?.into();
Ok(view_revision)
}
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>> {
let tables = ViewTableSql::read_views(belong_to_id, self.0)?;
let views = tables.into_iter().map(ViewRevision::from).collect::<Vec<_>>();
Ok(views)
}
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>> {
let tables = ViewTableSql::read_views(belong_to_id, self.0)?;
let views = tables
.into_iter()
.map(ViewRevision::from)
.collect::<Vec<_>>();
Ok(views)
}
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
ViewTableSql::update_view(changeset, self.0)?;
Ok(())
}
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
ViewTableSql::update_view(changeset, self.0)?;
Ok(())
}
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view_revision: ViewRevision = ViewTableSql::read_view(view_id, self.0)?.into();
ViewTableSql::delete_view(view_id, self.0)?;
Ok(view_revision)
}
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view_revision: ViewRevision = ViewTableSql::read_view(view_id, self.0)?.into();
ViewTableSql::delete_view(view_id, self.0)?;
Ok(view_revision)
}
fn move_view(&self, _view_id: &str, _from: usize, _to: usize) -> FlowyResult<()> {
Ok(())
}
fn move_view(&self, _view_id: &str, _from: usize, _to: usize) -> FlowyResult<()> {
Ok(())
}
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
TrashTableSql::create_trash(trashes, self.0)?;
Ok(())
}
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
TrashTableSql::create_trash(trashes, self.0)?;
Ok(())
}
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>> {
match trash_id {
None => TrashTableSql::read_all(self.0),
Some(trash_id) => {
let trash_revision: TrashRevision = TrashTableSql::read(&trash_id, self.0)?.into();
Ok(vec![trash_revision])
}
}
}
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()> {
match trash_ids {
None => TrashTableSql::delete_all(self.0),
Some(trash_ids) => {
for trash_id in &trash_ids {
TrashTableSql::delete_trash(trash_id, self.0)?;
}
Ok(())
}
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>> {
match trash_id {
None => TrashTableSql::read_all(self.0),
Some(trash_id) => {
let trash_revision: TrashRevision = TrashTableSql::read(&trash_id, self.0)?.into();
Ok(vec![trash_revision])
},
}
}
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()> {
match trash_ids {
None => TrashTableSql::delete_all(self.0),
Some(trash_ids) => {
for trash_id in &trash_ids {
TrashTableSql::delete_trash(trash_id, self.0)?;
}
Ok(())
},
}
}
}
// https://www.reddit.com/r/rust/comments/droxdg/why_arent_traits_impld_for_boxdyn_trait/
impl<T> FolderPersistenceTransaction for Box<T>
where
T: FolderPersistenceTransaction + ?Sized,
T: FolderPersistenceTransaction + ?Sized,
{
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
(**self).create_workspace(user_id, workspace_rev)
}
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
(**self).create_workspace(user_id, workspace_rev)
}
fn read_workspaces(&self, user_id: &str, workspace_id: Option<String>) -> FlowyResult<Vec<WorkspaceRevision>> {
(**self).read_workspaces(user_id, workspace_id)
}
fn read_workspaces(
&self,
user_id: &str,
workspace_id: Option<String>,
) -> FlowyResult<Vec<WorkspaceRevision>> {
(**self).read_workspaces(user_id, workspace_id)
}
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()> {
(**self).update_workspace(changeset)
}
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()> {
(**self).update_workspace(changeset)
}
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()> {
(**self).delete_workspace(workspace_id)
}
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()> {
(**self).delete_workspace(workspace_id)
}
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
(**self).create_app(app_rev)
}
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
(**self).create_app(app_rev)
}
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
(**self).update_app(changeset)
}
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
(**self).update_app(changeset)
}
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
(**self).read_app(app_id)
}
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
(**self).read_app(app_id)
}
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>> {
(**self).read_workspace_apps(workspace_id)
}
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>> {
(**self).read_workspace_apps(workspace_id)
}
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
(**self).delete_app(app_id)
}
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
(**self).delete_app(app_id)
}
fn move_app(&self, _app_id: &str, _from: usize, _to: usize) -> FlowyResult<()> {
Ok(())
}
fn move_app(&self, _app_id: &str, _from: usize, _to: usize) -> FlowyResult<()> {
Ok(())
}
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
(**self).create_view(view_rev)
}
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
(**self).create_view(view_rev)
}
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
(**self).read_view(view_id)
}
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
(**self).read_view(view_id)
}
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>> {
(**self).read_views(belong_to_id)
}
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>> {
(**self).read_views(belong_to_id)
}
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
(**self).update_view(changeset)
}
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
(**self).update_view(changeset)
}
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
(**self).delete_view(view_id)
}
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
(**self).delete_view(view_id)
}
fn move_view(&self, _view_id: &str, _from: usize, _to: usize) -> FlowyResult<()> {
Ok(())
}
fn move_view(&self, _view_id: &str, _from: usize, _to: usize) -> FlowyResult<()> {
Ok(())
}
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
(**self).create_trash(trashes)
}
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
(**self).create_trash(trashes)
}
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>> {
(**self).read_trash(trash_id)
}
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>> {
(**self).read_trash(trash_id)
}
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()> {
(**self).delete_trash(trash_ids)
}
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()> {
(**self).delete_trash(trash_ids)
}
}
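The Box<T> impl above exists because, as the linked Reddit thread discusses, a trait is not automatically implemented for Box<dyn Trait>; the blanket impl over T: FolderPersistenceTransaction + ?Sized forwards every method to the boxed value via (**self). A minimal sketch of the same trick with a made-up trait:

trait Store {
  fn get(&self, key: &str) -> Option<String>;
}

impl<T> Store for Box<T>
where
  T: Store + ?Sized,
{
  fn get(&self, key: &str) -> Option<String> {
    // (**self) dereferences through the Box to reach the inner T and call its method.
    (**self).get(key)
  }
}

// With this impl in place, a Box<dyn Store> can be handed to anything that expects a Store.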

View File

@@ -1,16 +1,16 @@
use crate::{
entities::{
trash::{TrashPB, TrashType},
view::UpdateViewParams,
},
errors::FlowyError,
services::persistence::version_1::app_sql::AppTable,
entities::{
trash::{TrashPB, TrashType},
view::UpdateViewParams,
},
errors::FlowyError,
services::persistence::version_1::app_sql::AppTable,
};
use diesel::sql_types::Integer;
use flowy_sqlite::{
prelude::*,
schema::{view_table, view_table::dsl},
SqliteConnection,
prelude::*,
schema::{view_table, view_table::dsl},
SqliteConnection,
};
use folder_model::{ViewDataFormatRevision, ViewLayoutTypeRevision, ViewRevision};
@@ -18,200 +18,209 @@ use lib_infra::util::timestamp;
pub struct ViewTableSql();
impl ViewTableSql {
pub(crate) fn create_view(view_rev: ViewRevision, conn: &SqliteConnection) -> Result<(), FlowyError> {
let view_table = ViewTable::new(view_rev);
match diesel_record_count!(view_table, &view_table.id, conn) {
0 => diesel_insert_table!(view_table, view_table.clone(), conn),
_ => {
let changeset = ViewChangeset::from_table(view_table);
diesel_update_table!(view_table, changeset, conn)
}
}
Ok(())
pub(crate) fn create_view(
view_rev: ViewRevision,
conn: &SqliteConnection,
) -> Result<(), FlowyError> {
let view_table = ViewTable::new(view_rev);
match diesel_record_count!(view_table, &view_table.id, conn) {
0 => diesel_insert_table!(view_table, view_table.clone(), conn),
_ => {
let changeset = ViewChangeset::from_table(view_table);
diesel_update_table!(view_table, changeset, conn)
},
}
Ok(())
}
pub(crate) fn read_view(view_id: &str, conn: &SqliteConnection) -> Result<ViewTable, FlowyError> {
// https://docs.diesel.rs/diesel/query_builder/struct.UpdateStatement.html
// let mut filter =
// dsl::view_table.filter(view_table::id.eq(view_id)).into_boxed();
// if let Some(is_trash) = is_trash {
// filter = filter.filter(view_table::is_trash.eq(is_trash));
// }
// let repeated_view = filter.first::<ViewTable>(conn)?;
let view_table = dsl::view_table
.filter(view_table::id.eq(view_id))
.first::<ViewTable>(conn)?;
pub(crate) fn read_view(view_id: &str, conn: &SqliteConnection) -> Result<ViewTable, FlowyError> {
// https://docs.diesel.rs/diesel/query_builder/struct.UpdateStatement.html
// let mut filter =
// dsl::view_table.filter(view_table::id.eq(view_id)).into_boxed();
// if let Some(is_trash) = is_trash {
// filter = filter.filter(view_table::is_trash.eq(is_trash));
// }
// let repeated_view = filter.first::<ViewTable>(conn)?;
let view_table = dsl::view_table
.filter(view_table::id.eq(view_id))
.first::<ViewTable>(conn)?;
Ok(view_table)
}
Ok(view_table)
}
// belong_to_id will be the app_id or view_id.
pub(crate) fn read_views(belong_to_id: &str, conn: &SqliteConnection) -> Result<Vec<ViewTable>, FlowyError> {
let view_tables = dsl::view_table
.filter(view_table::belong_to_id.eq(belong_to_id))
.order(view_table::create_time.asc())
.into_boxed()
.load::<ViewTable>(conn)?;
// belong_to_id will be the app_id or view_id.
pub(crate) fn read_views(
belong_to_id: &str,
conn: &SqliteConnection,
) -> Result<Vec<ViewTable>, FlowyError> {
let view_tables = dsl::view_table
.filter(view_table::belong_to_id.eq(belong_to_id))
.order(view_table::create_time.asc())
.into_boxed()
.load::<ViewTable>(conn)?;
Ok(view_tables)
}
Ok(view_tables)
}
pub(crate) fn update_view(changeset: ViewChangeset, conn: &SqliteConnection) -> Result<(), FlowyError> {
diesel_update_table!(view_table, changeset, conn);
Ok(())
}
pub(crate) fn update_view(
changeset: ViewChangeset,
conn: &SqliteConnection,
) -> Result<(), FlowyError> {
diesel_update_table!(view_table, changeset, conn);
Ok(())
}
pub(crate) fn delete_view(view_id: &str, conn: &SqliteConnection) -> Result<(), FlowyError> {
diesel_delete_table!(view_table, view_id, conn);
Ok(())
}
pub(crate) fn delete_view(view_id: &str, conn: &SqliteConnection) -> Result<(), FlowyError> {
diesel_delete_table!(view_table, view_id, conn);
Ok(())
}
}
#[derive(PartialEq, Clone, Debug, Queryable, Identifiable, Insertable, Associations)]
#[belongs_to(AppTable, foreign_key = "belong_to_id")]
#[table_name = "view_table"]
pub(crate) struct ViewTable {
pub id: String,
pub belong_to_id: String,
pub name: String,
pub desc: String,
pub modified_time: i64,
pub create_time: i64,
pub thumbnail: String,
pub view_type: SqlViewDataFormat,
pub version: i64,
pub is_trash: bool,
pub ext_data: String,
pub id: String,
pub belong_to_id: String,
pub name: String,
pub desc: String,
pub modified_time: i64,
pub create_time: i64,
pub thumbnail: String,
pub view_type: SqlViewDataFormat,
pub version: i64,
pub is_trash: bool,
pub ext_data: String,
}
impl ViewTable {
pub fn new(view_rev: ViewRevision) -> Self {
let data_type = match view_rev.data_format {
ViewDataFormatRevision::DeltaFormat => SqlViewDataFormat::Delta,
ViewDataFormatRevision::DatabaseFormat => SqlViewDataFormat::Database,
ViewDataFormatRevision::NodeFormat => SqlViewDataFormat::Tree,
};
pub fn new(view_rev: ViewRevision) -> Self {
let data_type = match view_rev.data_format {
ViewDataFormatRevision::DeltaFormat => SqlViewDataFormat::Delta,
ViewDataFormatRevision::DatabaseFormat => SqlViewDataFormat::Database,
ViewDataFormatRevision::NodeFormat => SqlViewDataFormat::Tree,
};
ViewTable {
id: view_rev.id,
belong_to_id: view_rev.app_id,
name: view_rev.name,
desc: view_rev.desc,
modified_time: view_rev.modified_time,
create_time: view_rev.create_time,
thumbnail: view_rev.thumbnail,
view_type: data_type,
ext_data: view_rev.ext_data,
version: view_rev.version,
is_trash: false,
}
ViewTable {
id: view_rev.id,
belong_to_id: view_rev.app_id,
name: view_rev.name,
desc: view_rev.desc,
modified_time: view_rev.modified_time,
create_time: view_rev.create_time,
thumbnail: view_rev.thumbnail,
view_type: data_type,
ext_data: view_rev.ext_data,
version: view_rev.version,
is_trash: false,
}
}
}
impl std::convert::From<ViewTable> for ViewRevision {
fn from(table: ViewTable) -> Self {
let data_type = match table.view_type {
SqlViewDataFormat::Delta => ViewDataFormatRevision::DeltaFormat,
SqlViewDataFormat::Database => ViewDataFormatRevision::DatabaseFormat,
SqlViewDataFormat::Tree => ViewDataFormatRevision::NodeFormat,
};
fn from(table: ViewTable) -> Self {
let data_type = match table.view_type {
SqlViewDataFormat::Delta => ViewDataFormatRevision::DeltaFormat,
SqlViewDataFormat::Database => ViewDataFormatRevision::DatabaseFormat,
SqlViewDataFormat::Tree => ViewDataFormatRevision::NodeFormat,
};
ViewRevision {
id: table.id,
app_id: table.belong_to_id,
name: table.name,
desc: table.desc,
data_format: data_type,
belongings: vec![],
modified_time: table.modified_time,
version: table.version,
create_time: table.create_time,
ext_data: "".to_string(),
thumbnail: table.thumbnail,
// Store the view in ViewTable was deprecated since v0.0.2.
// No need to worry about layout.
layout: ViewLayoutTypeRevision::Document,
}
ViewRevision {
id: table.id,
app_id: table.belong_to_id,
name: table.name,
desc: table.desc,
data_format: data_type,
belongings: vec![],
modified_time: table.modified_time,
version: table.version,
create_time: table.create_time,
ext_data: "".to_string(),
thumbnail: table.thumbnail,
// Store the view in ViewTable was deprecated since v0.0.2.
// No need to worry about layout.
layout: ViewLayoutTypeRevision::Document,
}
}
}
impl std::convert::From<ViewTable> for TrashPB {
fn from(table: ViewTable) -> Self {
TrashPB {
id: table.id,
name: table.name,
modified_time: table.modified_time,
create_time: table.create_time,
ty: TrashType::TrashView,
}
fn from(table: ViewTable) -> Self {
TrashPB {
id: table.id,
name: table.name,
modified_time: table.modified_time,
create_time: table.create_time,
ty: TrashType::TrashView,
}
}
}
#[derive(AsChangeset, Identifiable, Clone, Default, Debug)]
#[table_name = "view_table"]
pub struct ViewChangeset {
pub id: String,
pub name: Option<String>,
pub desc: Option<String>,
pub thumbnail: Option<String>,
pub modified_time: i64,
pub id: String,
pub name: Option<String>,
pub desc: Option<String>,
pub thumbnail: Option<String>,
pub modified_time: i64,
}
impl ViewChangeset {
pub(crate) fn new(params: UpdateViewParams) -> Self {
ViewChangeset {
id: params.view_id,
name: params.name,
desc: params.desc,
thumbnail: params.thumbnail,
modified_time: timestamp(),
}
pub(crate) fn new(params: UpdateViewParams) -> Self {
ViewChangeset {
id: params.view_id,
name: params.name,
desc: params.desc,
thumbnail: params.thumbnail,
modified_time: timestamp(),
}
}
pub(crate) fn from_table(table: ViewTable) -> Self {
ViewChangeset {
id: table.id,
name: Some(table.name),
desc: Some(table.desc),
thumbnail: Some(table.thumbnail),
modified_time: table.modified_time,
}
pub(crate) fn from_table(table: ViewTable) -> Self {
ViewChangeset {
id: table.id,
name: Some(table.name),
desc: Some(table.desc),
thumbnail: Some(table.thumbnail),
modified_time: table.modified_time,
}
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, FromSqlRow, AsExpression)]
#[repr(i32)]
#[sql_type = "Integer"]
pub enum SqlViewDataFormat {
Delta = 0,
Database = 1,
Tree = 2,
Delta = 0,
Database = 1,
Tree = 2,
}
impl std::default::Default for SqlViewDataFormat {
fn default() -> Self {
SqlViewDataFormat::Delta
}
fn default() -> Self {
SqlViewDataFormat::Delta
}
}
impl std::convert::From<i32> for SqlViewDataFormat {
fn from(value: i32) -> Self {
match value {
0 => SqlViewDataFormat::Delta,
1 => SqlViewDataFormat::Database,
2 => SqlViewDataFormat::Tree,
o => {
log::error!("Unsupported view type {}, fallback to ViewType::Block", o);
SqlViewDataFormat::Delta
}
}
fn from(value: i32) -> Self {
match value {
0 => SqlViewDataFormat::Delta,
1 => SqlViewDataFormat::Database,
2 => SqlViewDataFormat::Tree,
o => {
log::error!("Unsupported view type {}, fallback to ViewType::Block", o);
SqlViewDataFormat::Delta
},
}
}
}
impl SqlViewDataFormat {
pub fn value(&self) -> i32 {
*self as i32
}
pub fn value(&self) -> i32 {
*self as i32
}
}
impl_sql_integer_expression!(SqlViewDataFormat);

View File

@@ -1,123 +1,129 @@
use crate::{entities::workspace::UpdateWorkspaceParams, errors::FlowyError};
use diesel::SqliteConnection;
use flowy_sqlite::{
prelude::*,
schema::{workspace_table, workspace_table::dsl},
prelude::*,
schema::{workspace_table, workspace_table::dsl},
};
use folder_model::WorkspaceRevision;
pub(crate) struct WorkspaceTableSql();
impl WorkspaceTableSql {
pub(crate) fn create_workspace(
user_id: &str,
workspace_rev: WorkspaceRevision,
conn: &SqliteConnection,
) -> Result<(), FlowyError> {
let table = WorkspaceTable::new(workspace_rev, user_id);
match diesel_record_count!(workspace_table, &table.id, conn) {
0 => diesel_insert_table!(workspace_table, table.clone(), conn),
_ => {
let changeset = WorkspaceChangeset::from_table(table);
diesel_update_table!(workspace_table, changeset, conn);
}
}
Ok(())
}
pub(crate) fn read_workspaces(
user_id: &str,
workspace_id: Option<String>,
conn: &SqliteConnection,
) -> Result<Vec<WorkspaceTable>, FlowyError> {
let mut filter = dsl::workspace_table
.filter(workspace_table::user_id.eq(user_id))
.order(workspace_table::create_time.asc())
.into_boxed();
if let Some(workspace_id) = workspace_id {
filter = filter.filter(workspace_table::id.eq(workspace_id));
};
let workspaces = filter.load::<WorkspaceTable>(conn)?;
Ok(workspaces)
}
#[allow(dead_code)]
pub(crate) fn update_workspace(changeset: WorkspaceChangeset, conn: &SqliteConnection) -> Result<(), FlowyError> {
pub(crate) fn create_workspace(
user_id: &str,
workspace_rev: WorkspaceRevision,
conn: &SqliteConnection,
) -> Result<(), FlowyError> {
let table = WorkspaceTable::new(workspace_rev, user_id);
match diesel_record_count!(workspace_table, &table.id, conn) {
0 => diesel_insert_table!(workspace_table, table.clone(), conn),
_ => {
let changeset = WorkspaceChangeset::from_table(table);
diesel_update_table!(workspace_table, changeset, conn);
Ok(())
},
}
Ok(())
}
#[allow(dead_code)]
pub(crate) fn delete_workspace(workspace_id: &str, conn: &SqliteConnection) -> Result<(), FlowyError> {
diesel_delete_table!(workspace_table, workspace_id, conn);
Ok(())
}
pub(crate) fn read_workspaces(
user_id: &str,
workspace_id: Option<String>,
conn: &SqliteConnection,
) -> Result<Vec<WorkspaceTable>, FlowyError> {
let mut filter = dsl::workspace_table
.filter(workspace_table::user_id.eq(user_id))
.order(workspace_table::create_time.asc())
.into_boxed();
if let Some(workspace_id) = workspace_id {
filter = filter.filter(workspace_table::id.eq(workspace_id));
};
let workspaces = filter.load::<WorkspaceTable>(conn)?;
Ok(workspaces)
}
#[allow(dead_code)]
pub(crate) fn update_workspace(
changeset: WorkspaceChangeset,
conn: &SqliteConnection,
) -> Result<(), FlowyError> {
diesel_update_table!(workspace_table, changeset, conn);
Ok(())
}
#[allow(dead_code)]
pub(crate) fn delete_workspace(
workspace_id: &str,
conn: &SqliteConnection,
) -> Result<(), FlowyError> {
diesel_delete_table!(workspace_table, workspace_id, conn);
Ok(())
}
}
#[derive(PartialEq, Eq, Clone, Debug, Queryable, Identifiable, Insertable)]
#[table_name = "workspace_table"]
pub struct WorkspaceTable {
pub id: String,
pub name: String,
pub desc: String,
pub modified_time: i64,
pub create_time: i64,
pub user_id: String,
pub version: i64,
pub id: String,
pub name: String,
pub desc: String,
pub modified_time: i64,
pub create_time: i64,
pub user_id: String,
pub version: i64,
}
impl WorkspaceTable {
#[allow(dead_code)]
pub fn new(workspace_rev: WorkspaceRevision, user_id: &str) -> Self {
WorkspaceTable {
id: workspace_rev.id,
name: workspace_rev.name,
desc: workspace_rev.desc,
modified_time: workspace_rev.modified_time,
create_time: workspace_rev.create_time,
user_id: user_id.to_owned(),
version: 0,
}
#[allow(dead_code)]
pub fn new(workspace_rev: WorkspaceRevision, user_id: &str) -> Self {
WorkspaceTable {
id: workspace_rev.id,
name: workspace_rev.name,
desc: workspace_rev.desc,
modified_time: workspace_rev.modified_time,
create_time: workspace_rev.create_time,
user_id: user_id.to_owned(),
version: 0,
}
}
}
impl std::convert::From<WorkspaceTable> for WorkspaceRevision {
fn from(table: WorkspaceTable) -> Self {
WorkspaceRevision {
id: table.id,
name: table.name,
desc: table.desc,
apps: vec![],
modified_time: table.modified_time,
create_time: table.create_time,
}
fn from(table: WorkspaceTable) -> Self {
WorkspaceRevision {
id: table.id,
name: table.name,
desc: table.desc,
apps: vec![],
modified_time: table.modified_time,
create_time: table.create_time,
}
}
}
#[derive(AsChangeset, Identifiable, Clone, Default, Debug)]
#[table_name = "workspace_table"]
pub struct WorkspaceChangeset {
pub id: String,
pub name: Option<String>,
pub desc: Option<String>,
pub id: String,
pub name: Option<String>,
pub desc: Option<String>,
}
impl WorkspaceChangeset {
pub fn new(params: UpdateWorkspaceParams) -> Self {
WorkspaceChangeset {
id: params.id,
name: params.name,
desc: params.desc,
}
pub fn new(params: UpdateWorkspaceParams) -> Self {
WorkspaceChangeset {
id: params.id,
name: params.name,
desc: params.desc,
}
}
pub(crate) fn from_table(table: WorkspaceTable) -> Self {
WorkspaceChangeset {
id: table.id,
name: Some(table.name),
desc: Some(table.desc),
}
pub(crate) fn from_table(table: WorkspaceTable) -> Self {
WorkspaceChangeset {
id: table.id,
name: Some(table.name),
desc: Some(table.desc),
}
}
}
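read_workspaces above (like the rev_table reads earlier) calls into_boxed() so an optional filter can be appended at runtime without changing the query's static type. The pattern in isolation, against a hypothetical notes table (diesel 1.x assumed):

use diesel::prelude::*;
use diesel::table;

table! {
  notes (id) {
    id -> Text,
    owner -> Text,
  }
}

fn read_notes(
  conn: &SqliteConnection,
  owner: &str,
  note_id: Option<String>,
) -> QueryResult<Vec<(String, String)>> {
  use self::notes::dsl;
  // Boxing erases the concrete query type, so the branch below can reassign `query`.
  let mut query = dsl::notes.filter(dsl::owner.eq(owner)).into_boxed();
  if let Some(note_id) = note_id {
    query = query.filter(dsl::id.eq(note_id));
  }
  query.order(dsl::id.asc()).load::<(String, String)>(conn)
}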

View File

@@ -1,230 +1,245 @@
use crate::services::{
folder_editor::FolderEditor,
persistence::{AppChangeset, FolderPersistenceTransaction, ViewChangeset, WorkspaceChangeset},
folder_editor::FolderEditor,
persistence::{AppChangeset, FolderPersistenceTransaction, ViewChangeset, WorkspaceChangeset},
};
use flowy_error::{FlowyError, FlowyResult};
use folder_model::{AppRevision, TrashRevision, ViewRevision, WorkspaceRevision};
use std::sync::Arc;
impl FolderPersistenceTransaction for FolderEditor {
fn create_workspace(&self, _user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_workspace(workspace_rev)? {
self.apply_change(change)?;
}
Ok(())
fn create_workspace(&self, _user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_workspace(workspace_rev)? {
self.apply_change(change)?;
}
Ok(())
}
fn read_workspaces(&self, _user_id: &str, workspace_id: Option<String>) -> FlowyResult<Vec<WorkspaceRevision>> {
let workspaces = self.folder.read().read_workspaces(workspace_id)?;
Ok(workspaces)
}
fn read_workspaces(
&self,
_user_id: &str,
workspace_id: Option<String>,
) -> FlowyResult<Vec<WorkspaceRevision>> {
let workspaces = self.folder.read().read_workspaces(workspace_id)?;
Ok(workspaces)
}
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()> {
if let Some(change) = self
.folder
.write()
.update_workspace(&changeset.id, changeset.name, changeset.desc)?
{
self.apply_change(change)?;
}
Ok(())
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()> {
if let Some(change) =
self
.folder
.write()
.update_workspace(&changeset.id, changeset.name, changeset.desc)?
{
self.apply_change(change)?;
}
Ok(())
}
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()> {
if let Some(change) = self.folder.write().delete_workspace(workspace_id)? {
self.apply_change(change)?;
}
Ok(())
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()> {
if let Some(change) = self.folder.write().delete_workspace(workspace_id)? {
self.apply_change(change)?;
}
Ok(())
}
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_app(app_rev)? {
self.apply_change(change)?;
}
Ok(())
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_app(app_rev)? {
self.apply_change(change)?;
}
Ok(())
}
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
if let Some(change) = self
.folder
.write()
.update_app(&changeset.id, changeset.name, changeset.desc)?
{
self.apply_change(change)?;
}
Ok(())
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
if let Some(change) =
self
.folder
.write()
.update_app(&changeset.id, changeset.name, changeset.desc)?
{
self.apply_change(change)?;
}
Ok(())
}
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app = self.folder.read().read_app(app_id)?;
Ok(app)
}
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app = self.folder.read().read_app(app_id)?;
Ok(app)
}
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>> {
let workspaces = self.folder.read().read_workspaces(Some(workspace_id.to_owned()))?;
match workspaces.first() {
None => {
Err(FlowyError::record_not_found().context(format!("can't find workspace with id {}", workspace_id)))
}
Some(workspace) => Ok(workspace.apps.clone()),
}
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>> {
let workspaces = self
.folder
.read()
.read_workspaces(Some(workspace_id.to_owned()))?;
match workspaces.first() {
None => Err(
FlowyError::record_not_found()
.context(format!("can't find workspace with id {}", workspace_id)),
),
Some(workspace) => Ok(workspace.apps.clone()),
}
}
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app = self.folder.read().read_app(app_id)?;
if let Some(change) = self.folder.write().delete_app(app_id)? {
self.apply_change(change)?;
}
Ok(app)
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app = self.folder.read().read_app(app_id)?;
if let Some(change) = self.folder.write().delete_app(app_id)? {
self.apply_change(change)?;
}
Ok(app)
}
fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> {
if let Some(change) = self.folder.write().move_app(app_id, from, to)? {
self.apply_change(change)?;
}
Ok(())
fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> {
if let Some(change) = self.folder.write().move_app(app_id, from, to)? {
self.apply_change(change)?;
}
Ok(())
}
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_view(view_rev)? {
self.apply_change(change)?;
}
Ok(())
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_view(view_rev)? {
self.apply_change(change)?;
}
Ok(())
}
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view = self.folder.read().read_view(view_id)?;
Ok(view)
}
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view = self.folder.read().read_view(view_id)?;
Ok(view)
}
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>> {
let views = self.folder.read().read_views(belong_to_id)?;
Ok(views)
}
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>> {
let views = self.folder.read().read_views(belong_to_id)?;
Ok(views)
}
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
if let Some(change) =
self.folder
.write()
.update_view(&changeset.id, changeset.name, changeset.desc, changeset.modified_time)?
{
self.apply_change(change)?;
}
Ok(())
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
if let Some(change) = self.folder.write().update_view(
&changeset.id,
changeset.name,
changeset.desc,
changeset.modified_time,
)? {
self.apply_change(change)?;
}
Ok(())
}
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view = self.folder.read().read_view(view_id)?;
if let Some(change) = self.folder.write().delete_view(&view.app_id, view_id)? {
self.apply_change(change)?;
}
Ok(view)
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view = self.folder.read().read_view(view_id)?;
if let Some(change) = self.folder.write().delete_view(&view.app_id, view_id)? {
self.apply_change(change)?;
}
Ok(view)
}
fn move_view(&self, view_id: &str, from: usize, to: usize) -> FlowyResult<()> {
if let Some(change) = self.folder.write().move_view(view_id, from, to)? {
self.apply_change(change)?;
}
Ok(())
fn move_view(&self, view_id: &str, from: usize, to: usize) -> FlowyResult<()> {
if let Some(change) = self.folder.write().move_view(view_id, from, to)? {
self.apply_change(change)?;
}
Ok(())
}
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_trash(trashes)? {
self.apply_change(change)?;
}
Ok(())
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_trash(trashes)? {
self.apply_change(change)?;
}
Ok(())
}
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>> {
let trash = self.folder.read().read_trash(trash_id)?;
Ok(trash)
}
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>> {
let trash = self.folder.read().read_trash(trash_id)?;
Ok(trash)
}
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()> {
if let Some(change) = self.folder.write().delete_trash(trash_ids)? {
self.apply_change(change)?;
}
Ok(())
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()> {
if let Some(change) = self.folder.write().delete_trash(trash_ids)? {
self.apply_change(change)?;
}
Ok(())
}
}
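// A minimal usage sketch of the blanket impl below: every trait method is
// forwarded through the smart pointer, so an `Arc<FolderEditor>` (or any
// `Arc<T>` whose `T` implements the trait) can be passed wherever a
// `FolderPersistenceTransaction` is expected. The helper name here is
// illustrative only, not part of this crate:
//
//   fn trash_count(tx: &dyn FolderPersistenceTransaction) -> FlowyResult<usize> {
//     Ok(tx.read_trash(None)?.len())
//   }
//   // given `editor: Arc<FolderEditor>`, the call resolves through `(**self)`:
//   let count = trash_count(&editor)?;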
impl<T> FolderPersistenceTransaction for Arc<T>
where
T: FolderPersistenceTransaction + ?Sized,
T: FolderPersistenceTransaction + ?Sized,
{
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
(**self).create_workspace(user_id, workspace_rev)
}
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
(**self).create_workspace(user_id, workspace_rev)
}
fn read_workspaces(&self, user_id: &str, workspace_id: Option<String>) -> FlowyResult<Vec<WorkspaceRevision>> {
(**self).read_workspaces(user_id, workspace_id)
}
fn read_workspaces(
&self,
user_id: &str,
workspace_id: Option<String>,
) -> FlowyResult<Vec<WorkspaceRevision>> {
(**self).read_workspaces(user_id, workspace_id)
}
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()> {
(**self).update_workspace(changeset)
}
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()> {
(**self).update_workspace(changeset)
}
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()> {
(**self).delete_workspace(workspace_id)
}
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()> {
(**self).delete_workspace(workspace_id)
}
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
(**self).create_app(app_rev)
}
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
(**self).create_app(app_rev)
}
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
(**self).update_app(changeset)
}
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
(**self).update_app(changeset)
}
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
(**self).read_app(app_id)
}
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
(**self).read_app(app_id)
}
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>> {
(**self).read_workspace_apps(workspace_id)
}
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>> {
(**self).read_workspace_apps(workspace_id)
}
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
(**self).delete_app(app_id)
}
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
(**self).delete_app(app_id)
}
fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> {
(**self).move_app(app_id, from, to)
}
fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> {
(**self).move_app(app_id, from, to)
}
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
(**self).create_view(view_rev)
}
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
(**self).create_view(view_rev)
}
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
(**self).read_view(view_id)
}
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
(**self).read_view(view_id)
}
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>> {
(**self).read_views(belong_to_id)
}
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>> {
(**self).read_views(belong_to_id)
}
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
(**self).update_view(changeset)
}
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
(**self).update_view(changeset)
}
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
(**self).delete_view(view_id)
}
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
(**self).delete_view(view_id)
}
fn move_view(&self, view_id: &str, from: usize, to: usize) -> FlowyResult<()> {
(**self).move_view(view_id, from, to)
}
fn move_view(&self, view_id: &str, from: usize, to: usize) -> FlowyResult<()> {
(**self).move_view(view_id, from, to)
}
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
(**self).create_trash(trashes)
}
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
(**self).create_trash(trashes)
}
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>> {
(**self).read_trash(trash_id)
}
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>> {
(**self).read_trash(trash_id)
}
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()> {
(**self).delete_trash(trash_ids)
}
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()> {
(**self).delete_trash(trash_ids)
}
}

@ -1,9 +1,9 @@
use crate::{
entities::trash::{RepeatedTrashIdPB, RepeatedTrashPB, TrashIdPB, TrashPB, TrashType},
errors::{FlowyError, FlowyResult},
event_map::{FolderCouldServiceV1, WorkspaceUser},
notification::{send_anonymous_notification, FolderNotification},
services::persistence::{FolderPersistence, FolderPersistenceTransaction},
entities::trash::{RepeatedTrashIdPB, RepeatedTrashPB, TrashIdPB, TrashPB, TrashType},
errors::{FlowyError, FlowyResult},
event_map::{FolderCouldServiceV1, WorkspaceUser},
notification::{send_anonymous_notification, FolderNotification},
services::persistence::{FolderPersistence, FolderPersistenceTransaction},
};
use folder_model::TrashRevision;
@ -11,250 +11,275 @@ use std::{fmt::Formatter, sync::Arc};
use tokio::sync::{broadcast, mpsc};
pub struct TrashController {
persistence: Arc<FolderPersistence>,
notify: broadcast::Sender<TrashEvent>,
#[allow(dead_code)]
cloud_service: Arc<dyn FolderCouldServiceV1>,
#[allow(dead_code)]
user: Arc<dyn WorkspaceUser>,
persistence: Arc<FolderPersistence>,
notify: broadcast::Sender<TrashEvent>,
#[allow(dead_code)]
cloud_service: Arc<dyn FolderCouldServiceV1>,
#[allow(dead_code)]
user: Arc<dyn WorkspaceUser>,
}
impl TrashController {
pub fn new(
persistence: Arc<FolderPersistence>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
user: Arc<dyn WorkspaceUser>,
) -> Self {
let (tx, _) = broadcast::channel(10);
Self {
persistence,
notify: tx,
cloud_service,
user,
pub fn new(
persistence: Arc<FolderPersistence>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
user: Arc<dyn WorkspaceUser>,
) -> Self {
let (tx, _) = broadcast::channel(10);
Self {
persistence,
notify: tx,
cloud_service,
user,
}
}
#[tracing::instrument(level = "debug", skip(self), fields(putback), err)]
pub async fn putback(&self, trash_id: &str) -> FlowyResult<()> {
let (tx, mut rx) = mpsc::channel::<FlowyResult<()>>(1);
let trash = self
.persistence
.begin_transaction(|transaction| {
let mut repeated_trash = transaction.read_trash(Some(trash_id.to_owned()))?;
transaction.delete_trash(Some(vec![trash_id.to_owned()]))?;
notify_trash_changed(transaction.read_trash(None)?);
if repeated_trash.is_empty() {
return Err(FlowyError::internal().context("The trash to put back does not exist"));
}
Ok(repeated_trash.pop().unwrap())
})
.await?;
let identifier = TrashIdPB {
id: trash.id,
ty: trash.ty.into(),
};
tracing::Span::current().record("putback", format!("{:?}", &identifier).as_str());
let _ = self
.notify
.send(TrashEvent::Putback(vec![identifier].into(), tx));
rx.recv().await.unwrap()?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub async fn restore_all_trash(&self) -> FlowyResult<()> {
let trash_identifier: RepeatedTrashIdPB = self
.persistence
.begin_transaction(|transaction| {
let trash = transaction.read_trash(None);
let _ = transaction.delete_trash(None);
trash
})
.await?
.into();
let (tx, mut rx) = mpsc::channel::<FlowyResult<()>>(1);
let _ = self.notify.send(TrashEvent::Putback(trash_identifier, tx));
let _ = rx.recv().await;
notify_trash_changed(RepeatedTrashPB { items: vec![] });
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub async fn delete_all_trash(&self) -> FlowyResult<()> {
let all_trash_identifiers: RepeatedTrashIdPB = self
.persistence
.begin_transaction(|transaction| transaction.read_trash(None))
.await?
.into();
self.delete_with_identifiers(all_trash_identifiers).await?;
notify_trash_changed(RepeatedTrashPB { items: vec![] });
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub async fn delete(&self, trash_identifiers: RepeatedTrashIdPB) -> FlowyResult<()> {
self
.delete_with_identifiers(trash_identifiers.clone())
.await?;
let trash_revs = self
.persistence
.begin_transaction(|transaction| transaction.read_trash(None))
.await?;
notify_trash_changed(trash_revs);
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), fields(delete_trash_ids), err)]
pub async fn delete_with_identifiers(
&self,
trash_identifiers: RepeatedTrashIdPB,
) -> FlowyResult<()> {
let (tx, mut rx) = mpsc::channel::<FlowyResult<()>>(1);
tracing::Span::current().record(
"delete_trash_ids",
format!("{}", trash_identifiers).as_str(),
);
let _ = self
.notify
.send(TrashEvent::Delete(trash_identifiers.clone(), tx));
match rx.recv().await {
None => {},
Some(result) => match result {
Ok(_) => {},
Err(e) => log::error!("{}", e),
},
}
self
.persistence
.begin_transaction(|transaction| {
let ids = trash_identifiers
.items
.into_iter()
.map(|item| item.id)
.collect::<Vec<_>>();
transaction.delete_trash(Some(ids))
})
.await?;
#[tracing::instrument(level = "debug", skip(self), fields(putback), err)]
pub async fn putback(&self, trash_id: &str) -> FlowyResult<()> {
let (tx, mut rx) = mpsc::channel::<FlowyResult<()>>(1);
let trash = self
.persistence
.begin_transaction(|transaction| {
let mut repeated_trash = transaction.read_trash(Some(trash_id.to_owned()))?;
transaction.delete_trash(Some(vec![trash_id.to_owned()]))?;
notify_trash_changed(transaction.read_trash(None)?);
Ok(())
}
if repeated_trash.is_empty() {
return Err(FlowyError::internal().context("The trash to put back does not exist"));
}
Ok(repeated_trash.pop().unwrap())
})
.await?;
// [[ transaction ]]
// https://www.tutlane.com/tutorial/sqlite/sqlite-transactions-begin-commit-rollback
// We can use these commands only when we are performing INSERT, UPDATE, and
// DELETE operations. It's not possible for us to use these commands for
// CREATE and DROP table operations because those are auto-commit in the
// database.
#[tracing::instrument(
name = "add_trash",
level = "debug",
skip(self, trash),
fields(trash_ids),
err
)]
pub async fn add<T: Into<TrashRevision>>(&self, trash: Vec<T>) -> Result<(), FlowyError> {
let (tx, mut rx) = mpsc::channel::<FlowyResult<()>>(1);
let trash_revs: Vec<TrashRevision> = trash.into_iter().map(|t| t.into()).collect();
let identifiers = trash_revs
.iter()
.map(|t| t.into())
.collect::<Vec<TrashIdPB>>();
let identifier = TrashIdPB {
id: trash.id,
ty: trash.ty.into(),
};
tracing::Span::current().record(
"trash_ids",
format!(
"{:?}",
identifiers
.iter()
.map(|identifier| format!("{:?}:{}", identifier.ty, identifier.id))
.collect::<Vec<_>>()
)
.as_str(),
);
tracing::Span::current().record("putback", format!("{:?}", &identifier).as_str());
let _ = self.notify.send(TrashEvent::Putback(vec![identifier].into(), tx));
rx.recv().await.unwrap()?;
self
.persistence
.begin_transaction(|transaction| {
transaction.create_trash(trash_revs.clone())?;
notify_trash_changed(transaction.read_trash(None)?);
Ok(())
}
})
.await?;
let _ = self
.notify
.send(TrashEvent::NewTrash(identifiers.into(), tx));
rx.recv().await.unwrap()?;
#[tracing::instrument(level = "debug", skip(self), err)]
pub async fn restore_all_trash(&self) -> FlowyResult<()> {
let trash_identifier: RepeatedTrashIdPB = self
.persistence
.begin_transaction(|transaction| {
let trash = transaction.read_trash(None);
let _ = transaction.delete_trash(None);
trash
})
.await?
.into();
Ok(())
}
let (tx, mut rx) = mpsc::channel::<FlowyResult<()>>(1);
let _ = self.notify.send(TrashEvent::Putback(trash_identifier, tx));
let _ = rx.recv().await;
pub fn subscribe(&self) -> broadcast::Receiver<TrashEvent> {
self.notify.subscribe()
}
notify_trash_changed(RepeatedTrashPB { items: vec![] });
Ok(())
}
pub async fn read_trash(&self) -> Result<RepeatedTrashPB, FlowyError> {
let items: Vec<TrashPB> = self
.persistence
.begin_transaction(|transaction| transaction.read_trash(None))
.await?
.into_iter()
.map(|trash_rev| trash_rev.into())
.collect();
#[tracing::instrument(level = "debug", skip(self), err)]
pub async fn delete_all_trash(&self) -> FlowyResult<()> {
let all_trash_identifiers: RepeatedTrashIdPB = self
.persistence
.begin_transaction(|transaction| transaction.read_trash(None))
.await?
.into();
Ok(RepeatedTrashPB { items })
}
self.delete_with_identifiers(all_trash_identifiers).await?;
notify_trash_changed(RepeatedTrashPB { items: vec![] });
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub async fn delete(&self, trash_identifiers: RepeatedTrashIdPB) -> FlowyResult<()> {
self.delete_with_identifiers(trash_identifiers.clone()).await?;
let trash_revs = self
.persistence
.begin_transaction(|transaction| transaction.read_trash(None))
.await?;
notify_trash_changed(trash_revs);
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), fields(delete_trash_ids), err)]
pub async fn delete_with_identifiers(&self, trash_identifiers: RepeatedTrashIdPB) -> FlowyResult<()> {
let (tx, mut rx) = mpsc::channel::<FlowyResult<()>>(1);
tracing::Span::current().record("delete_trash_ids", format!("{}", trash_identifiers).as_str());
let _ = self.notify.send(TrashEvent::Delete(trash_identifiers.clone(), tx));
match rx.recv().await {
None => {}
Some(result) => match result {
Ok(_) => {}
Err(e) => log::error!("{}", e),
},
}
self.persistence
.begin_transaction(|transaction| {
let ids = trash_identifiers
.items
.into_iter()
.map(|item| item.id)
.collect::<Vec<_>>();
transaction.delete_trash(Some(ids))
})
.await?;
Ok(())
}
// [[ transaction ]]
// https://www.tutlane.com/tutorial/sqlite/sqlite-transactions-begin-commit-rollback
// We can use these commands only when we are performing INSERT, UPDATE, and
// DELETE operations. It's not possible for us to use these commands for
// CREATE and DROP table operations because those are auto-commit in the
// database.
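// A plain-SQL sketch of the point above (the table name is hypothetical, not
// this crate's schema): INSERT / UPDATE / DELETE statements can be grouped so
// that they either all apply or are all discarded together:
//
//   BEGIN;
//   DELETE FROM trash_table WHERE id = 'abc';
//   INSERT INTO trash_table (id, name) VALUES ('def', 'doc');
//   COMMIT;   -- or ROLLBACK to discard both changes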
#[tracing::instrument(name = "add_trash", level = "debug", skip(self, trash), fields(trash_ids), err)]
pub async fn add<T: Into<TrashRevision>>(&self, trash: Vec<T>) -> Result<(), FlowyError> {
let (tx, mut rx) = mpsc::channel::<FlowyResult<()>>(1);
let trash_revs: Vec<TrashRevision> = trash.into_iter().map(|t| t.into()).collect();
let identifiers = trash_revs.iter().map(|t| t.into()).collect::<Vec<TrashIdPB>>();
tracing::Span::current().record(
"trash_ids",
format!(
"{:?}",
identifiers
.iter()
.map(|identifier| format!("{:?}:{}", identifier.ty, identifier.id))
.collect::<Vec<_>>()
)
.as_str(),
);
self.persistence
.begin_transaction(|transaction| {
transaction.create_trash(trash_revs.clone())?;
notify_trash_changed(transaction.read_trash(None)?);
Ok(())
})
.await?;
let _ = self.notify.send(TrashEvent::NewTrash(identifiers.into(), tx));
rx.recv().await.unwrap()?;
Ok(())
}
pub fn subscribe(&self) -> broadcast::Receiver<TrashEvent> {
self.notify.subscribe()
}
pub async fn read_trash(&self) -> Result<RepeatedTrashPB, FlowyError> {
let items: Vec<TrashPB> = self
.persistence
.begin_transaction(|transaction| transaction.read_trash(None))
.await?
.into_iter()
.map(|trash_rev| trash_rev.into())
.collect();
Ok(RepeatedTrashPB { items })
}
pub fn read_trash_ids<'a>(
&self,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> Result<Vec<String>, FlowyError> {
let ids = transaction
.read_trash(None)?
.into_iter()
.map(|item| item.id)
.collect::<Vec<String>>();
Ok(ids)
}
pub fn read_trash_ids<'a>(
&self,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> Result<Vec<String>, FlowyError> {
let ids = transaction
.read_trash(None)?
.into_iter()
.map(|item| item.id)
.collect::<Vec<String>>();
Ok(ids)
}
}
#[tracing::instrument(level = "debug", skip(repeated_trash), fields(n_trash))]
fn notify_trash_changed<T: Into<RepeatedTrashPB>>(repeated_trash: T) {
let repeated_trash = repeated_trash.into();
tracing::Span::current().record("n_trash", repeated_trash.len());
send_anonymous_notification(FolderNotification::DidUpdateTrash)
.payload(repeated_trash)
.send();
let repeated_trash = repeated_trash.into();
tracing::Span::current().record("n_trash", repeated_trash.len());
send_anonymous_notification(FolderNotification::DidUpdateTrash)
.payload(repeated_trash)
.send();
}
#[derive(Clone)]
pub enum TrashEvent {
NewTrash(RepeatedTrashIdPB, mpsc::Sender<FlowyResult<()>>),
Putback(RepeatedTrashIdPB, mpsc::Sender<FlowyResult<()>>),
Delete(RepeatedTrashIdPB, mpsc::Sender<FlowyResult<()>>),
NewTrash(RepeatedTrashIdPB, mpsc::Sender<FlowyResult<()>>),
Putback(RepeatedTrashIdPB, mpsc::Sender<FlowyResult<()>>),
Delete(RepeatedTrashIdPB, mpsc::Sender<FlowyResult<()>>),
}
impl std::fmt::Debug for TrashEvent {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
TrashEvent::NewTrash(identifiers, _) => f.write_str(&format!("{:?}", identifiers)),
TrashEvent::Putback(identifiers, _) => f.write_str(&format!("{:?}", identifiers)),
TrashEvent::Delete(identifiers, _) => f.write_str(&format!("{:?}", identifiers)),
}
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
TrashEvent::NewTrash(identifiers, _) => f.write_str(&format!("{:?}", identifiers)),
TrashEvent::Putback(identifiers, _) => f.write_str(&format!("{:?}", identifiers)),
TrashEvent::Delete(identifiers, _) => f.write_str(&format!("{:?}", identifiers)),
}
}
}
impl TrashEvent {
pub fn select(self, s: TrashType) -> Option<TrashEvent> {
match self {
TrashEvent::Putback(mut identifiers, sender) => {
identifiers.items.retain(|item| item.ty == s);
if identifiers.items.is_empty() {
None
} else {
Some(TrashEvent::Putback(identifiers, sender))
}
}
TrashEvent::Delete(mut identifiers, sender) => {
identifiers.items.retain(|item| item.ty == s);
if identifiers.items.is_empty() {
None
} else {
Some(TrashEvent::Delete(identifiers, sender))
}
}
TrashEvent::NewTrash(mut identifiers, sender) => {
identifiers.items.retain(|item| item.ty == s);
if identifiers.items.is_empty() {
None
} else {
Some(TrashEvent::NewTrash(identifiers, sender))
}
}
pub fn select(self, s: TrashType) -> Option<TrashEvent> {
match self {
TrashEvent::Putback(mut identifiers, sender) => {
identifiers.items.retain(|item| item.ty == s);
if identifiers.items.is_empty() {
None
} else {
Some(TrashEvent::Putback(identifiers, sender))
}
},
TrashEvent::Delete(mut identifiers, sender) => {
identifiers.items.retain(|item| item.ty == s);
if identifiers.items.is_empty() {
None
} else {
Some(TrashEvent::Delete(identifiers, sender))
}
},
TrashEvent::NewTrash(mut identifiers, sender) => {
identifiers.items.retain(|item| item.ty == s);
if identifiers.items.is_empty() {
None
} else {
Some(TrashEvent::NewTrash(identifiers, sender))
}
},
}
}
}

@ -1,49 +1,49 @@
use crate::{
entities::trash::{RepeatedTrashIdPB, RepeatedTrashPB, TrashIdPB},
errors::FlowyError,
services::TrashController,
entities::trash::{RepeatedTrashIdPB, RepeatedTrashPB, TrashIdPB},
errors::FlowyError,
services::TrashController,
};
use lib_dispatch::prelude::{data_result, AFPluginData, AFPluginState, DataResult};
use std::sync::Arc;
#[tracing::instrument(level = "debug", skip(controller), err)]
pub(crate) async fn read_trash_handler(
controller: AFPluginState<Arc<TrashController>>,
controller: AFPluginState<Arc<TrashController>>,
) -> DataResult<RepeatedTrashPB, FlowyError> {
let repeated_trash = controller.read_trash().await?;
data_result(repeated_trash)
let repeated_trash = controller.read_trash().await?;
data_result(repeated_trash)
}
#[tracing::instrument(level = "debug", skip(identifier, controller), err)]
pub(crate) async fn putback_trash_handler(
identifier: AFPluginData<TrashIdPB>,
controller: AFPluginState<Arc<TrashController>>,
identifier: AFPluginData<TrashIdPB>,
controller: AFPluginState<Arc<TrashController>>,
) -> Result<(), FlowyError> {
controller.putback(&identifier.id).await?;
Ok(())
controller.putback(&identifier.id).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(identifiers, controller), err)]
pub(crate) async fn delete_trash_handler(
identifiers: AFPluginData<RepeatedTrashIdPB>,
controller: AFPluginState<Arc<TrashController>>,
identifiers: AFPluginData<RepeatedTrashIdPB>,
controller: AFPluginState<Arc<TrashController>>,
) -> Result<(), FlowyError> {
controller.delete(identifiers.into_inner()).await?;
Ok(())
controller.delete(identifiers.into_inner()).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(controller), err)]
pub(crate) async fn restore_all_trash_handler(
controller: AFPluginState<Arc<TrashController>>,
controller: AFPluginState<Arc<TrashController>>,
) -> Result<(), FlowyError> {
controller.restore_all_trash().await?;
Ok(())
controller.restore_all_trash().await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(controller), err)]
pub(crate) async fn delete_all_trash_handler(
controller: AFPluginState<Arc<TrashController>>,
controller: AFPluginState<Arc<TrashController>>,
) -> Result<(), FlowyError> {
controller.delete_all_trash().await?;
Ok(())
controller.delete_all_trash().await?;
Ok(())
}

@ -2,17 +2,17 @@ pub use crate::entities::view::ViewDataFormatPB;
use crate::entities::{AppPB, DeletedViewPB, ViewLayoutTypePB};
use crate::manager::{ViewDataProcessor, ViewDataProcessorMap};
use crate::{
entities::{
trash::{RepeatedTrashIdPB, TrashType},
view::{CreateViewParams, UpdateViewParams, ViewPB},
},
errors::{FlowyError, FlowyResult},
event_map::{FolderCouldServiceV1, WorkspaceUser},
notification::{send_notification, FolderNotification},
services::{
persistence::{FolderPersistence, FolderPersistenceTransaction, ViewChangeset},
TrashController, TrashEvent,
},
entities::{
trash::{RepeatedTrashIdPB, TrashType},
view::{CreateViewParams, UpdateViewParams, ViewPB},
},
errors::{FlowyError, FlowyResult},
event_map::{FolderCouldServiceV1, WorkspaceUser},
notification::{send_notification, FolderNotification},
services::{
persistence::{FolderPersistence, FolderPersistenceTransaction, ViewChangeset},
TrashController, TrashEvent,
},
};
use bytes::Bytes;
use flowy_sqlite::kv::KV;
@ -23,475 +23,511 @@ use std::{collections::HashSet, sync::Arc};
const LATEST_VIEW_ID: &str = "latest_view_id";
pub(crate) struct ViewController {
user: Arc<dyn WorkspaceUser>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
persistence: Arc<FolderPersistence>,
trash_controller: Arc<TrashController>,
data_processors: ViewDataProcessorMap,
}
impl ViewController {
pub(crate) fn new(
user: Arc<dyn WorkspaceUser>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
persistence: Arc<FolderPersistence>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
trash_controller: Arc<TrashController>,
data_processors: ViewDataProcessorMap,
}
) -> Self {
Self {
user,
cloud_service,
persistence,
trash_controller,
data_processors,
}
}
impl ViewController {
pub(crate) fn new(
user: Arc<dyn WorkspaceUser>,
persistence: Arc<FolderPersistence>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
trash_controller: Arc<TrashController>,
data_processors: ViewDataProcessorMap,
) -> Self {
Self {
user,
cloud_service,
persistence,
trash_controller,
data_processors,
pub(crate) fn initialize(&self) -> Result<(), FlowyError> {
self.listen_trash_can_event();
Ok(())
}
#[tracing::instrument(level = "trace", skip(self, params), fields(name = %params.name), err)]
pub(crate) async fn create_view_from_params(
&self,
mut params: CreateViewParams,
) -> Result<ViewRevision, FlowyError> {
let processor = self.get_data_processor(params.data_format.clone())?;
let user_id = self.user.user_id()?;
if params.initial_data.is_empty() {
tracing::trace!("Create view with build-in data");
let view_data = processor
.create_default_view(
&user_id,
&params.view_id,
params.layout.clone(),
params.data_format.clone(),
)
.await?;
params.initial_data = view_data.to_vec();
} else {
tracing::trace!("Create view with view data");
let view_data = processor
.create_view_with_data(
&user_id,
&params.view_id,
params.initial_data.clone(),
params.layout.clone(),
)
.await?;
self
.create_view(
&params.view_id,
params.data_format.clone(),
params.layout.clone(),
view_data,
)
.await?;
};
let view_rev = self.create_view_on_server(params).await?;
self.create_view_on_local(view_rev.clone()).await?;
Ok(view_rev)
}
#[tracing::instrument(level = "debug", skip(self, view_id, view_data), err)]
pub(crate) async fn create_view(
&self,
view_id: &str,
data_type: ViewDataFormatPB,
layout_type: ViewLayoutTypePB,
view_data: Bytes,
) -> Result<(), FlowyError> {
if view_data.is_empty() {
return Err(FlowyError::internal().context("The content of the view should not be empty"));
}
let user_id = self.user.user_id()?;
let processor = self.get_data_processor(data_type)?;
processor
.create_view(&user_id, view_id, layout_type, view_data)
.await?;
Ok(())
}
pub(crate) async fn create_view_on_local(
&self,
view_rev: ViewRevision,
) -> Result<(), FlowyError> {
let trash_controller = self.trash_controller.clone();
self
.persistence
.begin_transaction(|transaction| {
let belong_to_id = view_rev.app_id.clone();
transaction.create_view(view_rev)?;
notify_views_changed(&belong_to_id, trash_controller, &transaction)?;
Ok(())
})
.await
}
#[tracing::instrument(level = "debug", skip(self, view_id), err)]
pub(crate) async fn read_view(&self, view_id: &str) -> Result<ViewRevision, FlowyError> {
let view_rev = self
.persistence
.begin_transaction(|transaction| {
let view = transaction.read_view(view_id)?;
let trash_ids = self.trash_controller.read_trash_ids(&transaction)?;
if trash_ids.contains(&view.id) {
return Err(FlowyError::record_not_found());
}
}
Ok(view)
})
.await?;
Ok(view_rev)
}
pub(crate) fn initialize(&self) -> Result<(), FlowyError> {
self.listen_trash_can_event();
Ok(())
}
#[tracing::instrument(level = "trace", skip(self, params), fields(name = %params.name), err)]
pub(crate) async fn create_view_from_params(
&self,
mut params: CreateViewParams,
) -> Result<ViewRevision, FlowyError> {
let processor = self.get_data_processor(params.data_format.clone())?;
let user_id = self.user.user_id()?;
if params.initial_data.is_empty() {
tracing::trace!("Create view with build-in data");
let view_data = processor
.create_default_view(
&user_id,
&params.view_id,
params.layout.clone(),
params.data_format.clone(),
)
.await?;
params.initial_data = view_data.to_vec();
} else {
tracing::trace!("Create view with view data");
let view_data = processor
.create_view_with_data(
&user_id,
&params.view_id,
params.initial_data.clone(),
params.layout.clone(),
)
.await?;
self.create_view(
&params.view_id,
params.data_format.clone(),
params.layout.clone(),
view_data,
)
.await?;
};
let view_rev = self.create_view_on_server(params).await?;
self.create_view_on_local(view_rev.clone()).await?;
Ok(view_rev)
}
#[tracing::instrument(level = "debug", skip(self, view_id, view_data), err)]
pub(crate) async fn create_view(
&self,
view_id: &str,
data_type: ViewDataFormatPB,
layout_type: ViewLayoutTypePB,
view_data: Bytes,
) -> Result<(), FlowyError> {
if view_data.is_empty() {
return Err(FlowyError::internal().context("The content of the view should not be empty"));
pub(crate) async fn read_local_views(
&self,
ids: Vec<String>,
) -> Result<Vec<ViewRevision>, FlowyError> {
self
.persistence
.begin_transaction(|transaction| {
let mut views = vec![];
for view_id in ids {
views.push(transaction.read_view(&view_id)?);
}
let user_id = self.user.user_id()?;
let processor = self.get_data_processor(data_type)?;
processor.create_view(&user_id, view_id, layout_type, view_data).await?;
Ok(())
}
Ok(views)
})
.await
}
pub(crate) async fn create_view_on_local(&self, view_rev: ViewRevision) -> Result<(), FlowyError> {
let trash_controller = self.trash_controller.clone();
self.persistence
.begin_transaction(|transaction| {
let belong_to_id = view_rev.app_id.clone();
transaction.create_view(view_rev)?;
notify_views_changed(&belong_to_id, trash_controller, &transaction)?;
Ok(())
})
.await
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub(crate) fn set_latest_view(&self, view_id: &str) -> Result<(), FlowyError> {
KV::set_str(LATEST_VIEW_ID, view_id.to_owned());
Ok(())
}
#[tracing::instrument(level = "debug", skip(self, view_id), err)]
pub(crate) async fn read_view(&self, view_id: &str) -> Result<ViewRevision, FlowyError> {
let view_rev = self
.persistence
.begin_transaction(|transaction| {
let view = transaction.read_view(view_id)?;
let trash_ids = self.trash_controller.read_trash_ids(&transaction)?;
if trash_ids.contains(&view.id) {
return Err(FlowyError::record_not_found());
}
Ok(view)
})
.await?;
Ok(view_rev)
}
#[tracing::instrument(level = "trace", skip(self))]
pub(crate) fn clear_latest_view(&self) {
let _ = KV::remove(LATEST_VIEW_ID);
}
pub(crate) async fn read_local_views(&self, ids: Vec<String>) -> Result<Vec<ViewRevision>, FlowyError> {
self.persistence
.begin_transaction(|transaction| {
let mut views = vec![];
for view_id in ids {
views.push(transaction.read_view(&view_id)?);
}
Ok(views)
})
.await
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn close_view(&self, view_id: &str) -> Result<(), FlowyError> {
let processor = self.get_data_processor_from_view_id(view_id).await?;
processor.close_view(view_id).await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub(crate) fn set_latest_view(&self, view_id: &str) -> Result<(), FlowyError> {
KV::set_str(LATEST_VIEW_ID, view_id.to_owned());
Ok(())
}
#[tracing::instrument(level = "trace", skip(self))]
pub(crate) fn clear_latest_view(&self) {
#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn move_view_to_trash(&self, view_id: &str) -> Result<(), FlowyError> {
if let Some(latest_view_id) = KV::get_str(LATEST_VIEW_ID) {
if latest_view_id == view_id {
let _ = KV::remove(LATEST_VIEW_ID);
}
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn close_view(&self, view_id: &str) -> Result<(), FlowyError> {
let processor = self.get_data_processor_from_view_id(view_id).await?;
processor.close_view(view_id).await?;
let deleted_view = self
.persistence
.begin_transaction(|transaction| {
let view = transaction.read_view(view_id)?;
let views =
read_belonging_views_on_local(&view.app_id, self.trash_controller.clone(), &transaction)?;
let index = views
.iter()
.position(|view| view.id == view_id)
.map(|index| index as i32);
Ok(DeletedViewPB {
view_id: view_id.to_owned(),
index,
})
})
.await?;
send_notification(view_id, FolderNotification::DidMoveViewToTrash)
.payload(deleted_view)
.send();
let processor = self.get_data_processor_from_view_id(view_id).await?;
processor.close_view(view_id).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn move_view(
&self,
view_id: &str,
from: usize,
to: usize,
) -> Result<(), FlowyError> {
self
.persistence
.begin_transaction(|transaction| {
transaction.move_view(view_id, from, to)?;
let view = transaction.read_view(view_id)?;
notify_views_changed(&view.app_id, self.trash_controller.clone(), &transaction)?;
Ok(())
}
})
.await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn move_view_to_trash(&self, view_id: &str) -> Result<(), FlowyError> {
if let Some(latest_view_id) = KV::get_str(LATEST_VIEW_ID) {
if latest_view_id == view_id {
let _ = KV::remove(LATEST_VIEW_ID);
}
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn duplicate_view(&self, view: ViewPB) -> Result<(), FlowyError> {
let view_rev = self
.persistence
.begin_transaction(|transaction| transaction.read_view(&view.id))
.await?;
let deleted_view = self
.persistence
.begin_transaction(|transaction| {
let view = transaction.read_view(view_id)?;
let views = read_belonging_views_on_local(&view.app_id, self.trash_controller.clone(), &transaction)?;
let processor = self.get_data_processor(view_rev.data_format.clone())?;
let view_data = processor.get_view_data(&view).await?;
let duplicate_params = CreateViewParams {
belong_to_id: view_rev.app_id.clone(),
name: format!("{} (copy)", &view_rev.name),
desc: view_rev.desc,
thumbnail: view_rev.thumbnail,
data_format: view_rev.data_format.into(),
layout: view_rev.layout.into(),
initial_data: view_data.to_vec(),
view_id: gen_view_id(),
};
let index = views
.iter()
.position(|view| view.id == view_id)
.map(|index| index as i32);
Ok(DeletedViewPB {
view_id: view_id.to_owned(),
index,
})
})
.await?;
let _ = self.create_view_from_params(duplicate_params).await?;
Ok(())
}
send_notification(view_id, FolderNotification::DidMoveViewToTrash)
.payload(deleted_view)
.send();
// belong_to_id will be the app_id or view_id.
#[tracing::instrument(level = "trace", skip(self), err)]
pub(crate) async fn read_views_belong_to(
&self,
belong_to_id: &str,
) -> Result<Vec<ViewRevision>, FlowyError> {
self
.persistence
.begin_transaction(|transaction| {
read_belonging_views_on_local(belong_to_id, self.trash_controller.clone(), &transaction)
})
.await
}
let processor = self.get_data_processor_from_view_id(view_id).await?;
processor.close_view(view_id).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn move_view(&self, view_id: &str, from: usize, to: usize) -> Result<(), FlowyError> {
self.persistence
.begin_transaction(|transaction| {
transaction.move_view(view_id, from, to)?;
let view = transaction.read_view(view_id)?;
notify_views_changed(&view.app_id, self.trash_controller.clone(), &transaction)?;
Ok(())
})
.await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn duplicate_view(&self, view: ViewPB) -> Result<(), FlowyError> {
let view_rev = self
.persistence
.begin_transaction(|transaction| transaction.read_view(&view.id))
.await?;
let processor = self.get_data_processor(view_rev.data_format.clone())?;
let view_data = processor.get_view_data(&view).await?;
let duplicate_params = CreateViewParams {
belong_to_id: view_rev.app_id.clone(),
name: format!("{} (copy)", &view_rev.name),
desc: view_rev.desc,
thumbnail: view_rev.thumbnail,
data_format: view_rev.data_format.into(),
layout: view_rev.layout.into(),
initial_data: view_data.to_vec(),
view_id: gen_view_id(),
};
let _ = self.create_view_from_params(duplicate_params).await?;
Ok(())
}
// belong_to_id will be the app_id or view_id.
#[tracing::instrument(level = "trace", skip(self), err)]
pub(crate) async fn read_views_belong_to(&self, belong_to_id: &str) -> Result<Vec<ViewRevision>, FlowyError> {
self.persistence
.begin_transaction(|transaction| {
read_belonging_views_on_local(belong_to_id, self.trash_controller.clone(), &transaction)
})
.await
}
#[tracing::instrument(level = "debug", skip(self, params), err)]
pub(crate) async fn update_view(&self, params: UpdateViewParams) -> Result<ViewRevision, FlowyError> {
let changeset = ViewChangeset::new(params.clone());
let view_id = changeset.id.clone();
let view_rev = self
.persistence
.begin_transaction(|transaction| {
transaction.update_view(changeset)?;
let view_rev = transaction.read_view(&view_id)?;
let view: ViewPB = view_rev.clone().into();
send_notification(&view_id, FolderNotification::DidUpdateView)
.payload(view)
.send();
notify_views_changed(&view_rev.app_id, self.trash_controller.clone(), &transaction)?;
Ok(view_rev)
})
.await?;
let _ = self.update_view_on_server(params);
#[tracing::instrument(level = "debug", skip(self, params), err)]
pub(crate) async fn update_view(
&self,
params: UpdateViewParams,
) -> Result<ViewRevision, FlowyError> {
let changeset = ViewChangeset::new(params.clone());
let view_id = changeset.id.clone();
let view_rev = self
.persistence
.begin_transaction(|transaction| {
transaction.update_view(changeset)?;
let view_rev = transaction.read_view(&view_id)?;
let view: ViewPB = view_rev.clone().into();
send_notification(&view_id, FolderNotification::DidUpdateView)
.payload(view)
.send();
notify_views_changed(
&view_rev.app_id,
self.trash_controller.clone(),
&transaction,
)?;
Ok(view_rev)
}
})
.await?;
pub(crate) async fn latest_visit_view(&self) -> FlowyResult<Option<ViewRevision>> {
match KV::get_str(LATEST_VIEW_ID) {
None => Ok(None),
Some(view_id) => {
let view_rev = self
.persistence
.begin_transaction(|transaction| transaction.read_view(&view_id))
.await?;
Ok(Some(view_rev))
}
}
let _ = self.update_view_on_server(params);
Ok(view_rev)
}
pub(crate) async fn latest_visit_view(&self) -> FlowyResult<Option<ViewRevision>> {
match KV::get_str(LATEST_VIEW_ID) {
None => Ok(None),
Some(view_id) => {
let view_rev = self
.persistence
.begin_transaction(|transaction| transaction.read_view(&view_id))
.await?;
Ok(Some(view_rev))
},
}
}
}
impl ViewController {
#[tracing::instrument(level = "debug", skip(self, params), err)]
async fn create_view_on_server(&self, params: CreateViewParams) -> Result<ViewRevision, FlowyError> {
let token = self.user.token()?;
let view_rev = self.cloud_service.create_view(&token, params).await?;
Ok(view_rev)
}
#[tracing::instrument(level = "debug", skip(self, params), err)]
async fn create_view_on_server(
&self,
params: CreateViewParams,
) -> Result<ViewRevision, FlowyError> {
let token = self.user.token()?;
let view_rev = self.cloud_service.create_view(&token, params).await?;
Ok(view_rev)
}
#[tracing::instrument(level = "debug", skip(self), err)]
fn update_view_on_server(&self, params: UpdateViewParams) -> Result<(), FlowyError> {
let token = self.user.token()?;
let server = self.cloud_service.clone();
tokio::spawn(async move {
match server.update_view(&token, params).await {
Ok(_) => {}
Err(e) => {
// TODO: retry?
log::error!("Update view failed: {:?}", e);
}
}
});
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
fn update_view_on_server(&self, params: UpdateViewParams) -> Result<(), FlowyError> {
let token = self.user.token()?;
let server = self.cloud_service.clone();
tokio::spawn(async move {
match server.update_view(&token, params).await {
Ok(_) => {},
Err(e) => {
// TODO: retry?
log::error!("Update view failed: {:?}", e);
},
}
});
Ok(())
}
fn listen_trash_can_event(&self) {
let mut rx = self.trash_controller.subscribe();
let persistence = self.persistence.clone();
let data_processors = self.data_processors.clone();
let trash_controller = self.trash_controller.clone();
let _ = tokio::spawn(async move {
loop {
let mut stream = Box::pin(rx.recv().into_stream().filter_map(|result| async move {
match result {
Ok(event) => event.select(TrashType::TrashView),
Err(_e) => None,
}
}));
fn listen_trash_can_event(&self) {
let mut rx = self.trash_controller.subscribe();
let persistence = self.persistence.clone();
let data_processors = self.data_processors.clone();
let trash_controller = self.trash_controller.clone();
let _ = tokio::spawn(async move {
loop {
let mut stream = Box::pin(rx.recv().into_stream().filter_map(|result| async move {
match result {
Ok(event) => event.select(TrashType::TrashView),
Err(_e) => None,
}
}));
if let Some(event) = stream.next().await {
handle_trash_event(
persistence.clone(),
data_processors.clone(),
trash_controller.clone(),
event,
)
.await
}
}
});
}
async fn get_data_processor_from_view_id(
&self,
view_id: &str,
) -> FlowyResult<Arc<dyn ViewDataProcessor + Send + Sync>> {
let view = self
.persistence
.begin_transaction(|transaction| transaction.read_view(view_id))
.await?;
self.get_data_processor(view.data_format)
}
#[inline]
fn get_data_processor<T: Into<ViewDataFormatPB>>(
&self,
data_type: T,
) -> FlowyResult<Arc<dyn ViewDataProcessor + Send + Sync>> {
let data_type = data_type.into();
match self.data_processors.get(&data_type) {
None => Err(FlowyError::internal().context(format!(
"Get data processor failed. Unknown view data type: {:?}",
data_type
))),
Some(processor) => Ok(processor.clone()),
if let Some(event) = stream.next().await {
handle_trash_event(
persistence.clone(),
data_processors.clone(),
trash_controller.clone(),
event,
)
.await
}
}
});
}
async fn get_data_processor_from_view_id(
&self,
view_id: &str,
) -> FlowyResult<Arc<dyn ViewDataProcessor + Send + Sync>> {
let view = self
.persistence
.begin_transaction(|transaction| transaction.read_view(view_id))
.await?;
self.get_data_processor(view.data_format)
}
#[inline]
fn get_data_processor<T: Into<ViewDataFormatPB>>(
&self,
data_type: T,
) -> FlowyResult<Arc<dyn ViewDataProcessor + Send + Sync>> {
let data_type = data_type.into();
match self.data_processors.get(&data_type) {
None => Err(FlowyError::internal().context(format!(
"Get data processor failed. Unknown view data type: {:?}",
data_type
))),
Some(processor) => Ok(processor.clone()),
}
}
}
#[tracing::instrument(level = "trace", skip(persistence, data_processors, trash_can))]
async fn handle_trash_event(
persistence: Arc<FolderPersistence>,
data_processors: ViewDataProcessorMap,
trash_can: Arc<TrashController>,
event: TrashEvent,
persistence: Arc<FolderPersistence>,
data_processors: ViewDataProcessorMap,
trash_can: Arc<TrashController>,
event: TrashEvent,
) {
match event {
TrashEvent::NewTrash(identifiers, ret) => {
let result = persistence
.begin_transaction(|transaction| {
let view_revs = read_local_views_with_transaction(identifiers, &transaction)?;
for view_rev in view_revs {
notify_views_changed(&view_rev.app_id, trash_can.clone(), &transaction)?;
notify_dart(view_rev.into(), FolderNotification::DidDeleteView);
}
Ok(())
})
.await;
let _ = ret.send(result).await;
}
TrashEvent::Putback(identifiers, ret) => {
let result = persistence
.begin_transaction(|transaction| {
let view_revs = read_local_views_with_transaction(identifiers, &transaction)?;
for view_rev in view_revs {
notify_views_changed(&view_rev.app_id, trash_can.clone(), &transaction)?;
notify_dart(view_rev.into(), FolderNotification::DidRestoreView);
}
Ok(())
})
.await;
let _ = ret.send(result).await;
}
TrashEvent::Delete(identifiers, ret) => {
let result = || async {
let views = persistence
.begin_transaction(|transaction| {
let mut notify_ids = HashSet::new();
let mut views = vec![];
for identifier in identifiers.items {
if let Ok(view_rev) = transaction.delete_view(&identifier.id) {
notify_ids.insert(view_rev.app_id.clone());
views.push(view_rev);
}
}
for notify_id in notify_ids {
notify_views_changed(&notify_id, trash_can.clone(), &transaction)?;
}
Ok(views)
})
.await?;
match event {
TrashEvent::NewTrash(identifiers, ret) => {
let result = persistence
.begin_transaction(|transaction| {
let view_revs = read_local_views_with_transaction(identifiers, &transaction)?;
for view_rev in view_revs {
notify_views_changed(&view_rev.app_id, trash_can.clone(), &transaction)?;
notify_dart(view_rev.into(), FolderNotification::DidDeleteView);
}
Ok(())
})
.await;
let _ = ret.send(result).await;
},
TrashEvent::Putback(identifiers, ret) => {
let result = persistence
.begin_transaction(|transaction| {
let view_revs = read_local_views_with_transaction(identifiers, &transaction)?;
for view_rev in view_revs {
notify_views_changed(&view_rev.app_id, trash_can.clone(), &transaction)?;
notify_dart(view_rev.into(), FolderNotification::DidRestoreView);
}
Ok(())
})
.await;
let _ = ret.send(result).await;
},
TrashEvent::Delete(identifiers, ret) => {
let result = || async {
let views = persistence
.begin_transaction(|transaction| {
let mut notify_ids = HashSet::new();
let mut views = vec![];
for identifier in identifiers.items {
if let Ok(view_rev) = transaction.delete_view(&identifier.id) {
notify_ids.insert(view_rev.app_id.clone());
views.push(view_rev);
}
}
for notify_id in notify_ids {
notify_views_changed(&notify_id, trash_can.clone(), &transaction)?;
}
Ok(views)
})
.await?;
for view in views {
let data_type = view.data_format.clone().into();
match get_data_processor(data_processors.clone(), &data_type) {
Ok(processor) => {
processor.close_view(&view.id).await?;
}
Err(e) => tracing::error!("{}", e),
}
}
Ok(())
};
let _ = ret.send(result().await).await;
for view in views {
let data_type = view.data_format.clone().into();
match get_data_processor(data_processors.clone(), &data_type) {
Ok(processor) => {
processor.close_view(&view.id).await?;
},
Err(e) => tracing::error!("{}", e),
}
}
}
Ok(())
};
let _ = ret.send(result().await).await;
},
}
}
fn get_data_processor(
data_processors: ViewDataProcessorMap,
data_type: &ViewDataFormatPB,
data_processors: ViewDataProcessorMap,
data_type: &ViewDataFormatPB,
) -> FlowyResult<Arc<dyn ViewDataProcessor + Send + Sync>> {
match data_processors.get(data_type) {
None => Err(FlowyError::internal().context(format!(
"Get data processor failed. Unknown view data type: {:?}",
data_type
))),
Some(processor) => Ok(processor.clone()),
}
match data_processors.get(data_type) {
None => Err(FlowyError::internal().context(format!(
"Get data processor failed. Unknown view data type: {:?}",
data_type
))),
Some(processor) => Ok(processor.clone()),
}
}
fn read_local_views_with_transaction<'a>(
identifiers: RepeatedTrashIdPB,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
identifiers: RepeatedTrashIdPB,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> Result<Vec<ViewRevision>, FlowyError> {
let mut view_revs = vec![];
for identifier in identifiers.items {
view_revs.push(transaction.read_view(&identifier.id)?);
}
Ok(view_revs)
let mut view_revs = vec![];
for identifier in identifiers.items {
view_revs.push(transaction.read_view(&identifier.id)?);
}
Ok(view_revs)
}
fn notify_dart(view: ViewPB, notification: FolderNotification) {
send_notification(&view.id, notification).payload(view).send();
send_notification(&view.id, notification)
.payload(view)
.send();
}
#[tracing::instrument(
level = "debug",
skip(belong_to_id, trash_controller, transaction),
fields(view_count),
err
level = "debug",
skip(belong_to_id, trash_controller, transaction),
fields(view_count),
err
)]
fn notify_views_changed<'a>(
belong_to_id: &str,
trash_controller: Arc<TrashController>,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
belong_to_id: &str,
trash_controller: Arc<TrashController>,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> FlowyResult<()> {
let mut app_rev = transaction.read_app(belong_to_id)?;
let trash_ids = trash_controller.read_trash_ids(transaction)?;
app_rev.belongings.retain(|view| !trash_ids.contains(&view.id));
let app: AppPB = app_rev.into();
let mut app_rev = transaction.read_app(belong_to_id)?;
let trash_ids = trash_controller.read_trash_ids(transaction)?;
app_rev
.belongings
.retain(|view| !trash_ids.contains(&view.id));
let app: AppPB = app_rev.into();
send_notification(belong_to_id, FolderNotification::DidUpdateApp)
.payload(app)
.send();
send_notification(belong_to_id, FolderNotification::DidUpdateApp)
.payload(app)
.send();
Ok(())
Ok(())
}
fn read_belonging_views_on_local<'a>(
belong_to_id: &str,
trash_controller: Arc<TrashController>,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
belong_to_id: &str,
trash_controller: Arc<TrashController>,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> FlowyResult<Vec<ViewRevision>> {
let mut view_revs = transaction.read_views(belong_to_id)?;
let trash_ids = trash_controller.read_trash_ids(transaction)?;
view_revs.retain(|view_table| !trash_ids.contains(&view_table.id));
let mut view_revs = transaction.read_views(belong_to_id)?;
let trash_ids = trash_controller.read_trash_ids(transaction)?;
view_revs.retain(|view_table| !trash_ids.contains(&view_table.id));
Ok(view_revs)
Ok(view_revs)
}

@ -2,119 +2,121 @@ use crate::entities::view::{MoveFolderItemParams, MoveFolderItemPayloadPB, MoveF
use crate::manager::FolderManager;
use crate::services::{notify_workspace_setting_did_change, AppController};
use crate::{
entities::{
trash::TrashPB,
view::{
CreateViewParams, CreateViewPayloadPB, RepeatedViewIdPB, UpdateViewParams, UpdateViewPayloadPB, ViewIdPB,
ViewPB,
},
entities::{
trash::TrashPB,
view::{
CreateViewParams, CreateViewPayloadPB, RepeatedViewIdPB, UpdateViewParams,
UpdateViewPayloadPB, ViewIdPB, ViewPB,
},
errors::FlowyError,
services::{TrashController, ViewController},
},
errors::FlowyError,
services::{TrashController, ViewController},
};
use folder_model::TrashRevision;
use lib_dispatch::prelude::{data_result, AFPluginData, AFPluginState, DataResult};
use std::{convert::TryInto, sync::Arc};
pub(crate) async fn create_view_handler(
data: AFPluginData<CreateViewPayloadPB>,
controller: AFPluginState<Arc<ViewController>>,
data: AFPluginData<CreateViewPayloadPB>,
controller: AFPluginState<Arc<ViewController>>,
) -> DataResult<ViewPB, FlowyError> {
let params: CreateViewParams = data.into_inner().try_into()?;
let view_rev = controller.create_view_from_params(params).await?;
data_result(view_rev.into())
let params: CreateViewParams = data.into_inner().try_into()?;
let view_rev = controller.create_view_from_params(params).await?;
data_result(view_rev.into())
}
pub(crate) async fn read_view_handler(
data: AFPluginData<ViewIdPB>,
controller: AFPluginState<Arc<ViewController>>,
data: AFPluginData<ViewIdPB>,
controller: AFPluginState<Arc<ViewController>>,
) -> DataResult<ViewPB, FlowyError> {
let view_id: ViewIdPB = data.into_inner();
let view_rev = controller.read_view(&view_id.value).await?;
data_result(view_rev.into())
let view_id: ViewIdPB = data.into_inner();
let view_rev = controller.read_view(&view_id.value).await?;
data_result(view_rev.into())
}
#[tracing::instrument(level = "debug", skip(data, controller), err)]
pub(crate) async fn update_view_handler(
data: AFPluginData<UpdateViewPayloadPB>,
controller: AFPluginState<Arc<ViewController>>,
data: AFPluginData<UpdateViewPayloadPB>,
controller: AFPluginState<Arc<ViewController>>,
) -> Result<(), FlowyError> {
let params: UpdateViewParams = data.into_inner().try_into()?;
let _ = controller.update_view(params).await?;
let params: UpdateViewParams = data.into_inner().try_into()?;
let _ = controller.update_view(params).await?;
Ok(())
Ok(())
}
pub(crate) async fn delete_view_handler(
data: AFPluginData<RepeatedViewIdPB>,
view_controller: AFPluginState<Arc<ViewController>>,
trash_controller: AFPluginState<Arc<TrashController>>,
data: AFPluginData<RepeatedViewIdPB>,
view_controller: AFPluginState<Arc<ViewController>>,
trash_controller: AFPluginState<Arc<TrashController>>,
) -> Result<(), FlowyError> {
let params: RepeatedViewIdPB = data.into_inner();
for view_id in &params.items {
let _ = view_controller.move_view_to_trash(view_id).await;
}
let params: RepeatedViewIdPB = data.into_inner();
for view_id in &params.items {
let _ = view_controller.move_view_to_trash(view_id).await;
}
let trash = view_controller
.read_local_views(params.items)
.await?
.into_iter()
.map(|view| {
let trash_rev: TrashRevision = view.into();
trash_rev.into()
})
.collect::<Vec<TrashPB>>();
let trash = view_controller
.read_local_views(params.items)
.await?
.into_iter()
.map(|view| {
let trash_rev: TrashRevision = view.into();
trash_rev.into()
})
.collect::<Vec<TrashPB>>();
trash_controller.add(trash).await?;
Ok(())
trash_controller.add(trash).await?;
Ok(())
}
pub(crate) async fn set_latest_view_handler(
data: AFPluginData<ViewIdPB>,
folder: AFPluginState<Arc<FolderManager>>,
controller: AFPluginState<Arc<ViewController>>,
data: AFPluginData<ViewIdPB>,
folder: AFPluginState<Arc<FolderManager>>,
controller: AFPluginState<Arc<ViewController>>,
) -> Result<(), FlowyError> {
let view_id: ViewIdPB = data.into_inner();
controller.set_latest_view(&view_id.value)?;
notify_workspace_setting_did_change(&folder, &view_id).await?;
Ok(())
let view_id: ViewIdPB = data.into_inner();
controller.set_latest_view(&view_id.value)?;
notify_workspace_setting_did_change(&folder, &view_id).await?;
Ok(())
}
pub(crate) async fn close_view_handler(
data: AFPluginData<ViewIdPB>,
controller: AFPluginState<Arc<ViewController>>,
data: AFPluginData<ViewIdPB>,
controller: AFPluginState<Arc<ViewController>>,
) -> Result<(), FlowyError> {
let view_id: ViewIdPB = data.into_inner();
controller.close_view(&view_id.value).await?;
Ok(())
let view_id: ViewIdPB = data.into_inner();
controller.close_view(&view_id.value).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn move_item_handler(
data: AFPluginData<MoveFolderItemPayloadPB>,
view_controller: AFPluginState<Arc<ViewController>>,
app_controller: AFPluginState<Arc<AppController>>,
data: AFPluginData<MoveFolderItemPayloadPB>,
view_controller: AFPluginState<Arc<ViewController>>,
app_controller: AFPluginState<Arc<AppController>>,
) -> Result<(), FlowyError> {
let params: MoveFolderItemParams = data.into_inner().try_into()?;
match params.ty {
MoveFolderItemType::MoveApp => {
app_controller.move_app(&params.item_id, params.from, params.to).await?;
}
MoveFolderItemType::MoveView => {
view_controller
.move_view(&params.item_id, params.from, params.to)
.await?;
}
}
Ok(())
let params: MoveFolderItemParams = data.into_inner().try_into()?;
match params.ty {
MoveFolderItemType::MoveApp => {
app_controller
.move_app(&params.item_id, params.from, params.to)
.await?;
},
MoveFolderItemType::MoveView => {
view_controller
.move_view(&params.item_id, params.from, params.to)
.await?;
},
}
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, controller), err)]
pub(crate) async fn duplicate_view_handler(
data: AFPluginData<ViewPB>,
controller: AFPluginState<Arc<ViewController>>,
data: AFPluginData<ViewPB>,
controller: AFPluginState<Arc<ViewController>>,
) -> Result<(), FlowyError> {
let view: ViewPB = data.into_inner();
controller.duplicate_view(view).await?;
Ok(())
let view: ViewPB = data.into_inner();
controller.duplicate_view(view).await?;
Ok(())
}


@ -15,137 +15,156 @@ use ws_model::ws_revision::{ClientRevisionWSData, NewDocumentUser};
#[derive(Clone)]
pub struct FolderResolveOperations(pub FolderOperations);
impl OperationsDeserializer<FolderResolveOperations> for FolderResolveOperations {
fn deserialize_revisions(revisions: Vec<Revision>) -> FlowyResult<FolderResolveOperations> {
Ok(FolderResolveOperations(make_operations_from_revisions(revisions)?))
}
fn deserialize_revisions(revisions: Vec<Revision>) -> FlowyResult<FolderResolveOperations> {
Ok(FolderResolveOperations(make_operations_from_revisions(
revisions,
)?))
}
}
impl OperationsSerializer for FolderResolveOperations {
fn serialize_operations(&self) -> Bytes {
self.0.json_bytes()
}
fn serialize_operations(&self) -> Bytes {
self.0.json_bytes()
}
}
impl FolderResolveOperations {
pub fn into_inner(self) -> FolderOperations {
self.0
}
pub fn into_inner(self) -> FolderOperations {
self.0
}
}
pub type FolderConflictController = ConflictController<FolderResolveOperations, Arc<ConnectionPool>>;
pub type FolderConflictController =
ConflictController<FolderResolveOperations, Arc<ConnectionPool>>;
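// Wires the folder document into revision websocket syncing: outgoing revisions
// are drained from the WSDataProvider (the sink), incoming websocket data is fed
// to the FolderConflictController (the stream), and the returned manager pings
// the server every FOLDER_SYNC_INTERVAL_IN_MILLIS.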
#[allow(dead_code)]
pub(crate) async fn make_folder_ws_manager(
user_id: &str,
folder_id: &str,
rev_manager: Arc<RevisionManager<Arc<ConnectionPool>>>,
web_socket: Arc<dyn RevisionWebSocket>,
folder_pad: Arc<RwLock<FolderPad>>,
user_id: &str,
folder_id: &str,
rev_manager: Arc<RevisionManager<Arc<ConnectionPool>>>,
web_socket: Arc<dyn RevisionWebSocket>,
folder_pad: Arc<RwLock<FolderPad>>,
) -> Arc<RevisionWebSocketManager> {
let ws_data_provider = Arc::new(WSDataProvider::new(folder_id, Arc::new(rev_manager.clone())));
let resolver = Arc::new(FolderConflictResolver { folder_pad });
let conflict_controller =
FolderConflictController::new(user_id, resolver, Arc::new(ws_data_provider.clone()), rev_manager);
let ws_data_stream = Arc::new(FolderRevisionWSDataStream::new(conflict_controller));
let ws_data_sink = Arc::new(FolderWSDataSink(ws_data_provider));
let ping_duration = Duration::from_millis(FOLDER_SYNC_INTERVAL_IN_MILLIS);
Arc::new(RevisionWebSocketManager::new(
"Folder",
folder_id,
web_socket,
ws_data_sink,
ws_data_stream,
ping_duration,
))
let ws_data_provider = Arc::new(WSDataProvider::new(
folder_id,
Arc::new(rev_manager.clone()),
));
let resolver = Arc::new(FolderConflictResolver { folder_pad });
let conflict_controller = FolderConflictController::new(
user_id,
resolver,
Arc::new(ws_data_provider.clone()),
rev_manager,
);
let ws_data_stream = Arc::new(FolderRevisionWSDataStream::new(conflict_controller));
let ws_data_sink = Arc::new(FolderWSDataSink(ws_data_provider));
let ping_duration = Duration::from_millis(FOLDER_SYNC_INTERVAL_IN_MILLIS);
Arc::new(RevisionWebSocketManager::new(
"Folder",
folder_id,
web_socket,
ws_data_sink,
ws_data_stream,
ping_duration,
))
}
pub(crate) struct FolderWSDataSink(Arc<WSDataProvider>);
impl RevisionWebSocketSink for FolderWSDataSink {
fn next(&self) -> FutureResult<Option<ClientRevisionWSData>, FlowyError> {
let sink_provider = self.0.clone();
FutureResult::new(async move { sink_provider.next().await })
}
fn next(&self) -> FutureResult<Option<ClientRevisionWSData>, FlowyError> {
let sink_provider = self.0.clone();
FutureResult::new(async move { sink_provider.next().await })
}
}
struct FolderConflictResolver {
folder_pad: Arc<RwLock<FolderPad>>,
folder_pad: Arc<RwLock<FolderPad>>,
}
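// Conflict resolution against the in-memory FolderPad: `compose_operations`
// applies remote operations directly, `transform_operations` runs operational
// transformation only when the pad already holds local operations (otherwise the
// incoming operations are accepted as-is), and `reset_operations` replaces the
// pad's content with the server state.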
impl ConflictResolver<FolderResolveOperations> for FolderConflictResolver {
fn compose_operations(&self, operations: FolderResolveOperations) -> BoxResultFuture<RevisionMD5, FlowyError> {
let operations = operations.into_inner();
let folder_pad = self.folder_pad.clone();
Box::pin(async move {
let md5 = folder_pad.write().compose_remote_operations(operations)?;
Ok(md5.into())
})
}
fn compose_operations(
&self,
operations: FolderResolveOperations,
) -> BoxResultFuture<RevisionMD5, FlowyError> {
let operations = operations.into_inner();
let folder_pad = self.folder_pad.clone();
Box::pin(async move {
let md5 = folder_pad.write().compose_remote_operations(operations)?;
Ok(md5.into())
})
}
fn transform_operations(
&self,
operations: FolderResolveOperations,
) -> BoxResultFuture<TransformOperations<FolderResolveOperations>, FlowyError> {
let folder_pad = self.folder_pad.clone();
let operations = operations.into_inner();
Box::pin(async move {
let read_guard = folder_pad.read();
let mut server_operations: Option<FolderResolveOperations> = None;
let client_operations: FolderResolveOperations;
if read_guard.is_empty() {
// Do nothing
client_operations = FolderResolveOperations(operations);
} else {
let (s_prime, c_prime) = read_guard.get_operations().transform(&operations)?;
client_operations = FolderResolveOperations(c_prime);
server_operations = Some(FolderResolveOperations(s_prime));
}
drop(read_guard);
Ok(TransformOperations {
client_operations,
server_operations,
})
})
}
fn transform_operations(
&self,
operations: FolderResolveOperations,
) -> BoxResultFuture<TransformOperations<FolderResolveOperations>, FlowyError> {
let folder_pad = self.folder_pad.clone();
let operations = operations.into_inner();
Box::pin(async move {
let read_guard = folder_pad.read();
let mut server_operations: Option<FolderResolveOperations> = None;
let client_operations: FolderResolveOperations;
if read_guard.is_empty() {
// Do nothing
client_operations = FolderResolveOperations(operations);
} else {
let (s_prime, c_prime) = read_guard.get_operations().transform(&operations)?;
client_operations = FolderResolveOperations(c_prime);
server_operations = Some(FolderResolveOperations(s_prime));
}
drop(read_guard);
Ok(TransformOperations {
client_operations,
server_operations,
})
})
}
fn reset_operations(&self, operations: FolderResolveOperations) -> BoxResultFuture<RevisionMD5, FlowyError> {
let folder_pad = self.folder_pad.clone();
Box::pin(async move {
let md5 = folder_pad.write().reset_folder(operations.into_inner())?;
Ok(md5.into())
})
}
fn reset_operations(
&self,
operations: FolderResolveOperations,
) -> BoxResultFuture<RevisionMD5, FlowyError> {
let folder_pad = self.folder_pad.clone();
Box::pin(async move {
let md5 = folder_pad.write().reset_folder(operations.into_inner())?;
Ok(md5.into())
})
}
}
struct FolderRevisionWSDataStream {
conflict_controller: Arc<FolderConflictController>,
conflict_controller: Arc<FolderConflictController>,
}
impl FolderRevisionWSDataStream {
pub fn new(conflict_controller: FolderConflictController) -> Self {
Self {
conflict_controller: Arc::new(conflict_controller),
}
pub fn new(conflict_controller: FolderConflictController) -> Self {
Self {
conflict_controller: Arc::new(conflict_controller),
}
}
}
impl RevisionWSDataStream for FolderRevisionWSDataStream {
fn receive_push_revision(&self, revisions: Vec<Revision>) -> BoxResultFuture<(), FlowyError> {
let resolver = self.conflict_controller.clone();
Box::pin(async move { resolver.receive_revisions(revisions).await })
}
fn receive_push_revision(&self, revisions: Vec<Revision>) -> BoxResultFuture<(), FlowyError> {
let resolver = self.conflict_controller.clone();
Box::pin(async move { resolver.receive_revisions(revisions).await })
}
fn receive_ack(&self, rev_id: i64) -> BoxResultFuture<(), FlowyError> {
let resolver = self.conflict_controller.clone();
Box::pin(async move { resolver.ack_revision(rev_id).await })
}
fn receive_ack(&self, rev_id: i64) -> BoxResultFuture<(), FlowyError> {
let resolver = self.conflict_controller.clone();
Box::pin(async move { resolver.ack_revision(rev_id).await })
}
fn receive_new_user_connect(&self, _new_user: NewDocumentUser) -> BoxResultFuture<(), FlowyError> {
// Do nothing for now; this is just a placeholder for future extension.
Box::pin(async move { Ok(()) })
}
fn receive_new_user_connect(
&self,
_new_user: NewDocumentUser,
) -> BoxResultFuture<(), FlowyError> {
// Do nothing for now; this is just a placeholder for future extension.
Box::pin(async move { Ok(()) })
}
fn pull_revisions_in_range(&self, range: RevisionRange) -> BoxResultFuture<(), FlowyError> {
let resolver = self.conflict_controller.clone();
Box::pin(async move { resolver.send_revisions(range).await })
}
fn pull_revisions_in_range(&self, range: RevisionRange) -> BoxResultFuture<(), FlowyError> {
let resolver = self.conflict_controller.clone();
Box::pin(async move { resolver.send_revisions(range).await })
}
}


@ -1,263 +1,281 @@
use crate::entities::workspace::*;
use crate::manager::FolderManager;
use crate::{
errors::*,
event_map::{FolderCouldServiceV1, WorkspaceUser},
notification::*,
services::{
persistence::{FolderPersistence, FolderPersistenceTransaction, WorkspaceChangeset},
read_workspace_apps, TrashController,
},
errors::*,
event_map::{FolderCouldServiceV1, WorkspaceUser},
notification::*,
services::{
persistence::{FolderPersistence, FolderPersistenceTransaction, WorkspaceChangeset},
read_workspace_apps, TrashController,
},
};
use flowy_sqlite::kv::KV;
use folder_model::{AppRevision, WorkspaceRevision};
use std::sync::Arc;
pub struct WorkspaceController {
pub user: Arc<dyn WorkspaceUser>,
pub user: Arc<dyn WorkspaceUser>,
persistence: Arc<FolderPersistence>,
pub(crate) trash_controller: Arc<TrashController>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
}
impl WorkspaceController {
pub(crate) fn new(
user: Arc<dyn WorkspaceUser>,
persistence: Arc<FolderPersistence>,
trash_can: Arc<TrashController>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
) -> Self {
Self {
user,
persistence,
trash_controller: trash_can,
cloud_service,
}
}
pub(crate) async fn create_workspace_from_params(
&self,
params: CreateWorkspaceParams,
) -> Result<WorkspaceRevision, FlowyError> {
let workspace = self.create_workspace_on_server(params.clone()).await?;
let user_id = self.user.user_id()?;
let token = self.user.token()?;
let workspaces = self
.persistence
.begin_transaction(|transaction| {
transaction.create_workspace(&user_id, workspace.clone())?;
transaction.read_workspaces(&user_id, None)
})
.await?
.into_iter()
.map(|workspace_rev| workspace_rev.into())
.collect();
let repeated_workspace = RepeatedWorkspacePB { items: workspaces };
send_notification(&token, FolderNotification::DidCreateWorkspace)
.payload(repeated_workspace)
.send();
set_current_workspace(&user_id, &workspace.id);
Ok(workspace)
}
#[allow(dead_code)]
pub(crate) async fn update_workspace(
&self,
params: UpdateWorkspaceParams,
) -> Result<(), FlowyError> {
let changeset = WorkspaceChangeset::new(params.clone());
let workspace_id = changeset.id.clone();
let workspace = self
.persistence
.begin_transaction(|transaction| {
transaction.update_workspace(changeset)?;
let user_id = self.user.user_id()?;
self.read_workspace(workspace_id.clone(), &user_id, &transaction)
})
.await?;
send_notification(&workspace_id, FolderNotification::DidUpdateWorkspace)
.payload(workspace)
.send();
self.update_workspace_on_server(params)?;
Ok(())
}
#[allow(dead_code)]
pub(crate) async fn delete_workspace(&self, workspace_id: &str) -> Result<(), FlowyError> {
let user_id = self.user.user_id()?;
let token = self.user.token()?;
let repeated_workspace = self
.persistence
.begin_transaction(|transaction| {
transaction.delete_workspace(workspace_id)?;
self.read_workspaces(None, &user_id, &transaction)
})
.await?;
send_notification(&token, FolderNotification::DidDeleteWorkspace)
.payload(repeated_workspace)
.send();
self.delete_workspace_on_server(workspace_id)?;
Ok(())
}
pub(crate) async fn open_workspace(
&self,
params: WorkspaceIdPB,
) -> Result<WorkspacePB, FlowyError> {
let user_id = self.user.user_id()?;
if let Some(workspace_id) = params.value {
let workspace = self
.persistence
.begin_transaction(|transaction| self.read_workspace(workspace_id, &user_id, &transaction))
.await?;
set_current_workspace(&user_id, &workspace.id);
Ok(workspace)
} else {
Err(FlowyError::workspace_id().context("Opened workspace id should not be empty"))
}
}
pub(crate) async fn read_current_workspace_apps(&self) -> Result<Vec<AppRevision>, FlowyError> {
let user_id = self.user.user_id()?;
let workspace_id = get_current_workspace(&user_id)?;
let app_revs = self
.persistence
.begin_transaction(|transaction| {
read_workspace_apps(&workspace_id, self.trash_controller.clone(), &transaction)
})
.await?;
// TODO: read from server
Ok(app_revs)
}
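// Reads the user's workspaces (optionally narrowed to a single workspace id) and
// filters out any apps whose ids are currently in the trash.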
#[tracing::instrument(level = "debug", skip(self, transaction), err)]
pub(crate) fn read_workspaces<'a>(
&self,
workspace_id: Option<String>,
user_id: &str,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> Result<RepeatedWorkspacePB, FlowyError> {
let workspace_id = workspace_id.to_owned();
let trash_ids = self.trash_controller.read_trash_ids(transaction)?;
let workspaces = transaction
.read_workspaces(user_id, workspace_id)?
.into_iter()
.map(|mut workspace_rev| {
workspace_rev
.apps
.retain(|app_rev| !trash_ids.contains(&app_rev.id));
workspace_rev.into()
})
.collect();
Ok(RepeatedWorkspacePB { items: workspaces })
}
pub(crate) fn read_workspace<'a>(
&self,
workspace_id: String,
user_id: &str,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> Result<WorkspacePB, FlowyError> {
let mut workspaces = self
.read_workspaces(Some(workspace_id.clone()), user_id, transaction)?
.items;
if workspaces.is_empty() {
return Err(
FlowyError::record_not_found().context(format!("{} workspace not found", workspace_id)),
);
}
debug_assert_eq!(workspaces.len(), 1);
let workspace = workspaces
.drain(..1)
.collect::<Vec<WorkspacePB>>()
.pop()
.unwrap();
Ok(workspace)
}
}
impl WorkspaceController {
pub(crate) fn new(
user: Arc<dyn WorkspaceUser>,
persistence: Arc<FolderPersistence>,
trash_can: Arc<TrashController>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
) -> Self {
Self {
user,
persistence,
trash_controller: trash_can,
cloud_service,
}
}
#[tracing::instrument(level = "trace", skip(self), err)]
async fn create_workspace_on_server(
&self,
params: CreateWorkspaceParams,
) -> Result<WorkspaceRevision, FlowyError> {
let token = self.user.token()?;
self.cloud_service.create_workspace(&token, params).await
}
pub(crate) async fn create_workspace_from_params(
&self,
params: CreateWorkspaceParams,
) -> Result<WorkspaceRevision, FlowyError> {
let workspace = self.create_workspace_on_server(params.clone()).await?;
let user_id = self.user.user_id()?;
let token = self.user.token()?;
let workspaces = self
.persistence
.begin_transaction(|transaction| {
transaction.create_workspace(&user_id, workspace.clone())?;
transaction.read_workspaces(&user_id, None)
})
.await?
.into_iter()
.map(|workspace_rev| workspace_rev.into())
.collect();
let repeated_workspace = RepeatedWorkspacePB { items: workspaces };
send_notification(&token, FolderNotification::DidCreateWorkspace)
.payload(repeated_workspace)
.send();
set_current_workspace(&user_id, &workspace.id);
Ok(workspace)
}
#[tracing::instrument(level = "trace", skip(self), err)]
fn update_workspace_on_server(&self, params: UpdateWorkspaceParams) -> Result<(), FlowyError> {
let (token, server) = (self.user.token()?, self.cloud_service.clone());
tokio::spawn(async move {
match server.update_workspace(&token, params).await {
Ok(_) => {},
Err(e) => {
// TODO: retry?
log::error!("Update workspace failed: {:?}", e);
},
}
});
Ok(())
}
#[allow(dead_code)]
pub(crate) async fn update_workspace(&self, params: UpdateWorkspaceParams) -> Result<(), FlowyError> {
let changeset = WorkspaceChangeset::new(params.clone());
let workspace_id = changeset.id.clone();
let workspace = self
.persistence
.begin_transaction(|transaction| {
transaction.update_workspace(changeset)?;
let user_id = self.user.user_id()?;
self.read_workspace(workspace_id.clone(), &user_id, &transaction)
})
.await?;
send_notification(&workspace_id, FolderNotification::DidUpdateWorkspace)
.payload(workspace)
.send();
self.update_workspace_on_server(params)?;
Ok(())
}
#[allow(dead_code)]
pub(crate) async fn delete_workspace(&self, workspace_id: &str) -> Result<(), FlowyError> {
let user_id = self.user.user_id()?;
let token = self.user.token()?;
let repeated_workspace = self
.persistence
.begin_transaction(|transaction| {
transaction.delete_workspace(workspace_id)?;
self.read_workspaces(None, &user_id, &transaction)
})
.await?;
send_notification(&token, FolderNotification::DidDeleteWorkspace)
.payload(repeated_workspace)
.send();
self.delete_workspace_on_server(workspace_id)?;
Ok(())
}
pub(crate) async fn open_workspace(&self, params: WorkspaceIdPB) -> Result<WorkspacePB, FlowyError> {
let user_id = self.user.user_id()?;
if let Some(workspace_id) = params.value {
let workspace = self
.persistence
.begin_transaction(|transaction| self.read_workspace(workspace_id, &user_id, &transaction))
.await?;
set_current_workspace(&user_id, &workspace.id);
Ok(workspace)
} else {
Err(FlowyError::workspace_id().context("Opened workspace id should not be empty"))
}
}
pub(crate) async fn read_current_workspace_apps(&self) -> Result<Vec<AppRevision>, FlowyError> {
let user_id = self.user.user_id()?;
let workspace_id = get_current_workspace(&user_id)?;
let app_revs = self
.persistence
.begin_transaction(|transaction| {
read_workspace_apps(&workspace_id, self.trash_controller.clone(), &transaction)
})
.await?;
// TODO: read from server
Ok(app_revs)
}
#[tracing::instrument(level = "debug", skip(self, transaction), err)]
pub(crate) fn read_workspaces<'a>(
&self,
workspace_id: Option<String>,
user_id: &str,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> Result<RepeatedWorkspacePB, FlowyError> {
let workspace_id = workspace_id.to_owned();
let trash_ids = self.trash_controller.read_trash_ids(transaction)?;
let workspaces = transaction
.read_workspaces(user_id, workspace_id)?
.into_iter()
.map(|mut workspace_rev| {
workspace_rev.apps.retain(|app_rev| !trash_ids.contains(&app_rev.id));
workspace_rev.into()
})
.collect();
Ok(RepeatedWorkspacePB { items: workspaces })
}
pub(crate) fn read_workspace<'a>(
&self,
workspace_id: String,
user_id: &str,
transaction: &'a (dyn FolderPersistenceTransaction + 'a),
) -> Result<WorkspacePB, FlowyError> {
let mut workspaces = self
.read_workspaces(Some(workspace_id.clone()), user_id, transaction)?
.items;
if workspaces.is_empty() {
return Err(FlowyError::record_not_found().context(format!("{} workspace not found", workspace_id)));
}
debug_assert_eq!(workspaces.len(), 1);
let workspace = workspaces.drain(..1).collect::<Vec<WorkspacePB>>().pop().unwrap();
Ok(workspace)
}
}
impl WorkspaceController {
#[tracing::instrument(level = "trace", skip(self), err)]
async fn create_workspace_on_server(&self, params: CreateWorkspaceParams) -> Result<WorkspaceRevision, FlowyError> {
let token = self.user.token()?;
self.cloud_service.create_workspace(&token, params).await
}
#[tracing::instrument(level = "trace", skip(self), err)]
fn update_workspace_on_server(&self, params: UpdateWorkspaceParams) -> Result<(), FlowyError> {
let (token, server) = (self.user.token()?, self.cloud_service.clone());
tokio::spawn(async move {
match server.update_workspace(&token, params).await {
Ok(_) => {}
Err(e) => {
// TODO: retry?
log::error!("Update workspace failed: {:?}", e);
}
}
});
Ok(())
}
#[tracing::instrument(level = "trace", skip(self), err)]
fn delete_workspace_on_server(&self, workspace_id: &str) -> Result<(), FlowyError> {
let params = WorkspaceIdPB {
value: Some(workspace_id.to_string()),
};
let (token, server) = (self.user.token()?, self.cloud_service.clone());
tokio::spawn(async move {
match server.delete_workspace(&token, params).await {
Ok(_) => {}
Err(e) => {
// TODO: retry?
log::error!("Delete workspace failed: {:?}", e);
}
}
});
Ok(())
}
#[tracing::instrument(level = "trace", skip(self), err)]
fn delete_workspace_on_server(&self, workspace_id: &str) -> Result<(), FlowyError> {
let params = WorkspaceIdPB {
value: Some(workspace_id.to_string()),
};
let (token, server) = (self.user.token()?, self.cloud_service.clone());
tokio::spawn(async move {
match server.delete_workspace(&token, params).await {
Ok(_) => {},
Err(e) => {
// TODO: retry?
log::error!("Delete workspace failed: {:?}", e);
},
}
});
Ok(())
}
}
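// Loads the current workspace and, when available, the latest visited view in a
// single transaction, then pushes a DidUpdateWorkspaceSetting notification keyed
// by the user's token.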
pub async fn notify_workspace_setting_did_change(
folder_manager: &Arc<FolderManager>,
view_id: &str,
folder_manager: &Arc<FolderManager>,
view_id: &str,
) -> FlowyResult<()> {
let user_id = folder_manager.user.user_id()?;
let token = folder_manager.user.token()?;
let workspace_id = get_current_workspace(&user_id)?;
let user_id = folder_manager.user.user_id()?;
let token = folder_manager.user.token()?;
let workspace_id = get_current_workspace(&user_id)?;
let workspace_setting = folder_manager
.persistence
.begin_transaction(|transaction| {
let workspace =
folder_manager
.workspace_controller
.read_workspace(workspace_id.clone(), &user_id, &transaction)?;
let workspace_setting = folder_manager
.persistence
.begin_transaction(|transaction| {
let workspace = folder_manager.workspace_controller.read_workspace(
workspace_id.clone(),
&user_id,
&transaction,
)?;
let setting = match transaction.read_view(view_id) {
Ok(latest_view) => WorkspaceSettingPB {
workspace,
latest_view: Some(latest_view.into()),
},
Err(_) => WorkspaceSettingPB {
workspace,
latest_view: None,
},
};
let setting = match transaction.read_view(view_id) {
Ok(latest_view) => WorkspaceSettingPB {
workspace,
latest_view: Some(latest_view.into()),
},
Err(_) => WorkspaceSettingPB {
workspace,
latest_view: None,
},
};
Ok(setting)
})
.await?;
Ok(setting)
})
.await?;
send_notification(&token, FolderNotification::DidUpdateWorkspaceSetting)
.payload(workspace_setting)
.send();
Ok(())
send_notification(&token, FolderNotification::DidUpdateWorkspaceSetting)
.payload(workspace_setting)
.send();
Ok(())
}
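// The current workspace id is cached in the local KV store under a single key;
// the `_user_id` parameters are accepted but currently unused.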
const CURRENT_WORKSPACE_ID: &str = "current_workspace_id";
pub fn set_current_workspace(_user_id: &str, workspace_id: &str) {
KV::set_str(CURRENT_WORKSPACE_ID, workspace_id.to_owned());
KV::set_str(CURRENT_WORKSPACE_ID, workspace_id.to_owned());
}
pub fn clear_current_workspace(_user_id: &str) {
let _ = KV::remove(CURRENT_WORKSPACE_ID);
let _ = KV::remove(CURRENT_WORKSPACE_ID);
}
pub fn get_current_workspace(_user_id: &str) -> Result<String, FlowyError> {
match KV::get_str(CURRENT_WORKSPACE_ID) {
None => {
Err(FlowyError::record_not_found()
.context("Current workspace not found or should call open workspace first"))
}
Some(workspace_id) => Ok(workspace_id),
}
match KV::get_str(CURRENT_WORKSPACE_ID) {
None => Err(
FlowyError::record_not_found()
.context("Current workspace not found or should call open workspace first"),
),
Some(workspace_id) => Ok(workspace_id),
}
}


@ -1,99 +1,103 @@
use crate::entities::{
app::RepeatedAppPB,
view::ViewPB,
workspace::{RepeatedWorkspacePB, WorkspaceIdPB, WorkspaceSettingPB, *},
app::RepeatedAppPB,
view::ViewPB,
workspace::{RepeatedWorkspacePB, WorkspaceIdPB, WorkspaceSettingPB, *},
};
use crate::{
errors::FlowyError,
manager::FolderManager,
services::{get_current_workspace, read_workspace_apps, WorkspaceController},
errors::FlowyError,
manager::FolderManager,
services::{get_current_workspace, read_workspace_apps, WorkspaceController},
};
use lib_dispatch::prelude::{data_result, AFPluginData, AFPluginState, DataResult};
use std::{convert::TryInto, sync::Arc};
#[tracing::instrument(level = "debug", skip(data, controller), err)]
pub(crate) async fn create_workspace_handler(
data: AFPluginData<CreateWorkspacePayloadPB>,
controller: AFPluginState<Arc<WorkspaceController>>,
data: AFPluginData<CreateWorkspacePayloadPB>,
controller: AFPluginState<Arc<WorkspaceController>>,
) -> DataResult<WorkspacePB, FlowyError> {
let controller = controller.get_ref().clone();
let params: CreateWorkspaceParams = data.into_inner().try_into()?;
let workspace_rev = controller.create_workspace_from_params(params).await?;
data_result(workspace_rev.into())
let controller = controller.get_ref().clone();
let params: CreateWorkspaceParams = data.into_inner().try_into()?;
let workspace_rev = controller.create_workspace_from_params(params).await?;
data_result(workspace_rev.into())
}
#[tracing::instrument(level = "debug", skip(controller), err)]
pub(crate) async fn read_workspace_apps_handler(
controller: AFPluginState<Arc<WorkspaceController>>,
controller: AFPluginState<Arc<WorkspaceController>>,
) -> DataResult<RepeatedAppPB, FlowyError> {
let items = controller
.read_current_workspace_apps()
.await?
.into_iter()
.map(|app_rev| app_rev.into())
.collect();
let repeated_app = RepeatedAppPB { items };
data_result(repeated_app)
let items = controller
.read_current_workspace_apps()
.await?
.into_iter()
.map(|app_rev| app_rev.into())
.collect();
let repeated_app = RepeatedAppPB { items };
data_result(repeated_app)
}
#[tracing::instrument(level = "debug", skip(data, controller), err)]
pub(crate) async fn open_workspace_handler(
data: AFPluginData<WorkspaceIdPB>,
controller: AFPluginState<Arc<WorkspaceController>>,
data: AFPluginData<WorkspaceIdPB>,
controller: AFPluginState<Arc<WorkspaceController>>,
) -> DataResult<WorkspacePB, FlowyError> {
let params: WorkspaceIdPB = data.into_inner();
let workspaces = controller.open_workspace(params).await?;
data_result(workspaces)
let params: WorkspaceIdPB = data.into_inner();
let workspaces = controller.open_workspace(params).await?;
data_result(workspaces)
}
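// Reading workspaces also loads each workspace's apps (minus those in the trash)
// inside the same persistence transaction before returning the list.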
#[tracing::instrument(level = "debug", skip(data, folder), err)]
pub(crate) async fn read_workspaces_handler(
data: AFPluginData<WorkspaceIdPB>,
folder: AFPluginState<Arc<FolderManager>>,
data: AFPluginData<WorkspaceIdPB>,
folder: AFPluginState<Arc<FolderManager>>,
) -> DataResult<RepeatedWorkspacePB, FlowyError> {
let params: WorkspaceIdPB = data.into_inner();
let user_id = folder.user.user_id()?;
let workspace_controller = folder.workspace_controller.clone();
let params: WorkspaceIdPB = data.into_inner();
let user_id = folder.user.user_id()?;
let workspace_controller = folder.workspace_controller.clone();
let trash_controller = folder.trash_controller.clone();
let workspaces = folder
.persistence
.begin_transaction(|transaction| {
let mut workspaces = workspace_controller.read_workspaces(params.value.clone(), &user_id, &transaction)?;
for workspace in workspaces.iter_mut() {
let apps = read_workspace_apps(&workspace.id, trash_controller.clone(), &transaction)?
.into_iter()
.map(|app_rev| app_rev.into())
.collect();
workspace.apps.items = apps;
}
Ok(workspaces)
})
.await?;
data_result(workspaces)
let trash_controller = folder.trash_controller.clone();
let workspaces = folder
.persistence
.begin_transaction(|transaction| {
let mut workspaces =
workspace_controller.read_workspaces(params.value.clone(), &user_id, &transaction)?;
for workspace in workspaces.iter_mut() {
let apps = read_workspace_apps(&workspace.id, trash_controller.clone(), &transaction)?
.into_iter()
.map(|app_rev| app_rev.into())
.collect();
workspace.apps.items = apps;
}
Ok(workspaces)
})
.await?;
data_result(workspaces)
}
#[tracing::instrument(level = "debug", skip(folder), err)]
pub async fn read_cur_workspace_handler(
folder: AFPluginState<Arc<FolderManager>>,
folder: AFPluginState<Arc<FolderManager>>,
) -> DataResult<WorkspaceSettingPB, FlowyError> {
let user_id = folder.user.user_id()?;
let workspace_id = get_current_workspace(&user_id)?;
let workspace = folder
.persistence
.begin_transaction(|transaction| {
folder
.workspace_controller
.read_workspace(workspace_id, &user_id, &transaction)
})
.await?;
let user_id = folder.user.user_id()?;
let workspace_id = get_current_workspace(&user_id)?;
let workspace = folder
.persistence
.begin_transaction(|transaction| {
folder
.workspace_controller
.read_workspace(workspace_id, &user_id, &transaction)
})
.await?;
let latest_view: Option<ViewPB> = folder
.view_controller
.latest_visit_view()
.await
.unwrap_or(None)
.map(|view_rev| view_rev.into());
let setting = WorkspaceSettingPB { workspace, latest_view };
data_result(setting)
let latest_view: Option<ViewPB> = folder
.view_controller
.latest_visit_view()
.await
.unwrap_or(None)
.map(|view_rev| view_rev.into());
let setting = WorkspaceSettingPB {
workspace,
latest_view,
};
data_result(setting)
}


@ -3,73 +3,78 @@ use crate::event_map::{FolderCouldServiceV1, WorkspaceUser};
use lib_infra::retry::Action;
use pin_project::pin_project;
use std::{
future::Future,
marker::PhantomData,
pin::Pin,
sync::Arc,
task::{Context, Poll},
future::Future,
marker::PhantomData,
pin::Pin,
sync::Arc,
task::{Context, Poll},
};
pub(crate) type Builder<Fut> = Box<dyn Fn(String, Arc<dyn FolderCouldServiceV1>) -> Fut + Send + Sync>;
pub(crate) type Builder<Fut> =
Box<dyn Fn(String, Arc<dyn FolderCouldServiceV1>) -> Fut + Send + Sync>;
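// Retry support for cloud-service calls: the boxed builder closure recreates the
// request future on every attempt, reusing the token captured when the action was
// constructed.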
#[allow(dead_code)]
pub(crate) struct RetryAction<Fut, T, E> {
token: String,
cloud_service: Arc<dyn FolderCouldServiceV1>,
user: Arc<dyn WorkspaceUser>,
builder: Builder<Fut>,
phantom: PhantomData<(T, E)>,
token: String,
cloud_service: Arc<dyn FolderCouldServiceV1>,
user: Arc<dyn WorkspaceUser>,
builder: Builder<Fut>,
phantom: PhantomData<(T, E)>,
}
impl<Fut, T, E> RetryAction<Fut, T, E> {
#[allow(dead_code)]
pub(crate) fn new<F>(cloud_service: Arc<dyn FolderCouldServiceV1>, user: Arc<dyn WorkspaceUser>, builder: F) -> Self
where
Fut: Future<Output = Result<T, E>> + Send + Sync + 'static,
F: Fn(String, Arc<dyn FolderCouldServiceV1>) -> Fut + Send + Sync + 'static,
{
let token = user.token().unwrap_or_else(|_| "".to_owned());
Self {
token,
cloud_service,
user,
builder: Box::new(builder),
phantom: PhantomData,
}
#[allow(dead_code)]
pub(crate) fn new<F>(
cloud_service: Arc<dyn FolderCouldServiceV1>,
user: Arc<dyn WorkspaceUser>,
builder: F,
) -> Self
where
Fut: Future<Output = Result<T, E>> + Send + Sync + 'static,
F: Fn(String, Arc<dyn FolderCouldServiceV1>) -> Fut + Send + Sync + 'static,
{
let token = user.token().unwrap_or_else(|_| "".to_owned());
Self {
token,
cloud_service,
user,
builder: Box::new(builder),
phantom: PhantomData,
}
}
}
impl<Fut, T, E> Action for RetryAction<Fut, T, E>
where
Fut: Future<Output = Result<T, E>> + Send + Sync + 'static,
T: Send + Sync + 'static,
E: Send + Sync + 'static,
Fut: Future<Output = Result<T, E>> + Send + Sync + 'static,
T: Send + Sync + 'static,
E: Send + Sync + 'static,
{
type Future = Pin<Box<dyn Future<Output = Result<Self::Item, Self::Error>> + Send + Sync>>;
type Item = T;
type Error = E;
type Future = Pin<Box<dyn Future<Output = Result<Self::Item, Self::Error>> + Send + Sync>>;
type Item = T;
type Error = E;
fn run(&mut self) -> Self::Future {
let fut = (self.builder)(self.token.clone(), self.cloud_service.clone());
Box::pin(RetryActionFut { fut: Box::pin(fut) })
}
fn run(&mut self) -> Self::Future {
let fut = (self.builder)(self.token.clone(), self.cloud_service.clone());
Box::pin(RetryActionFut { fut: Box::pin(fut) })
}
}
#[pin_project]
struct RetryActionFut<T, E> {
#[pin]
fut: Pin<Box<dyn Future<Output = Result<T, E>> + Send + Sync>>,
#[pin]
fut: Pin<Box<dyn Future<Output = Result<T, E>> + Send + Sync>>,
}
impl<T, E> Future for RetryActionFut<T, E>
where
T: Send + Sync + 'static,
E: Send + Sync + 'static,
T: Send + Sync + 'static,
E: Send + Sync + 'static,
{
type Output = Result<T, E>;
type Output = Result<T, E>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let mut this = self.project();
this.fut.as_mut().poll(cx)
}
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let mut this = self.project();
this.fut.as_mut().poll(cx)
}
}


@ -6,288 +6,310 @@ use flowy_test::{event_builder::*, FlowySDKTest};
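// These integration tests drive a FolderTest fixture (defined in the test harness
// further below) through FolderScript steps and assert on the workspace, app,
// view, and trash state it accumulates.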
#[tokio::test]
async fn workspace_read_all() {
let mut test = FolderTest::new().await;
test.run_scripts(vec![ReadAllWorkspaces]).await;
assert!(!test.all_workspace.is_empty());
let mut test = FolderTest::new().await;
test.run_scripts(vec![ReadAllWorkspaces]).await;
assert!(!test.all_workspace.is_empty());
}
#[tokio::test]
async fn workspace_create() {
let mut test = FolderTest::new().await;
let name = "My new workspace".to_owned();
let desc = "Daily routines".to_owned();
test.run_scripts(vec![CreateWorkspace {
name: name.clone(),
desc: desc.clone(),
let mut test = FolderTest::new().await;
let name = "My new workspace".to_owned();
let desc = "Daily routines".to_owned();
test
.run_scripts(vec![CreateWorkspace {
name: name.clone(),
desc: desc.clone(),
}])
.await;
let workspace = test.workspace.clone();
assert_eq!(workspace.name, name);
assert_eq!(workspace.desc, desc);
let workspace = test.workspace.clone();
assert_eq!(workspace.name, name);
assert_eq!(workspace.desc, desc);
test.run_scripts(vec![
ReadWorkspace(Some(workspace.id.clone())),
AssertWorkspace(workspace),
test
.run_scripts(vec![
ReadWorkspace(Some(workspace.id.clone())),
AssertWorkspace(workspace),
])
.await;
}
#[tokio::test]
async fn workspace_read() {
let mut test = FolderTest::new().await;
let workspace = test.workspace.clone();
let mut test = FolderTest::new().await;
let workspace = test.workspace.clone();
test.run_scripts(vec![
ReadWorkspace(Some(workspace.id.clone())),
AssertWorkspace(workspace),
test
.run_scripts(vec![
ReadWorkspace(Some(workspace.id.clone())),
AssertWorkspace(workspace),
])
.await;
}
#[tokio::test]
async fn workspace_create_with_apps() {
let mut test = FolderTest::new().await;
test.run_scripts(vec![CreateApp {
name: "App".to_string(),
desc: "App description".to_string(),
let mut test = FolderTest::new().await;
test
.run_scripts(vec![CreateApp {
name: "App".to_string(),
desc: "App description".to_string(),
}])
.await;
let app = test.app.clone();
test.run_scripts(vec![ReadApp(app.id)]).await;
let app = test.app.clone();
test.run_scripts(vec![ReadApp(app.id)]).await;
}
#[tokio::test]
async fn workspace_create_with_invalid_name() {
for (name, code) in invalid_workspace_name_test_case() {
let sdk = FlowySDKTest::default();
let request = CreateWorkspacePayloadPB {
name,
desc: "".to_owned(),
};
assert_eq!(
FolderEventBuilder::new(sdk)
.event(flowy_folder::event_map::FolderEvent::CreateWorkspace)
.payload(request)
.async_send()
.await
.error()
.code,
code.value()
)
}
for (name, code) in invalid_workspace_name_test_case() {
let sdk = FlowySDKTest::default();
let request = CreateWorkspacePayloadPB {
name,
desc: "".to_owned(),
};
assert_eq!(
FolderEventBuilder::new(sdk)
.event(flowy_folder::event_map::FolderEvent::CreateWorkspace)
.payload(request)
.async_send()
.await
.error()
.code,
code.value()
)
}
}
#[tokio::test]
#[should_panic]
async fn app_delete() {
let mut test = FolderTest::new().await;
let app = test.app.clone();
test.run_scripts(vec![DeleteApp, ReadApp(app.id)]).await;
let mut test = FolderTest::new().await;
let app = test.app.clone();
test.run_scripts(vec![DeleteApp, ReadApp(app.id)]).await;
}
#[tokio::test]
async fn app_delete_then_restore() {
let mut test = FolderTest::new().await;
let app = test.app.clone();
test.run_scripts(vec![
DeleteApp,
RestoreAppFromTrash,
ReadApp(app.id.clone()),
AssertApp(app),
let mut test = FolderTest::new().await;
let app = test.app.clone();
test
.run_scripts(vec![
DeleteApp,
RestoreAppFromTrash,
ReadApp(app.id.clone()),
AssertApp(app),
])
.await;
}
#[tokio::test]
async fn app_read() {
let mut test = FolderTest::new().await;
let app = test.app.clone();
test.run_scripts(vec![ReadApp(app.id.clone()), AssertApp(app)]).await;
let mut test = FolderTest::new().await;
let app = test.app.clone();
test
.run_scripts(vec![ReadApp(app.id.clone()), AssertApp(app)])
.await;
}
#[tokio::test]
async fn app_update() {
let mut test = FolderTest::new().await;
let app = test.app.clone();
let new_name = "😁 hell world".to_owned();
assert_ne!(app.name, new_name);
let mut test = FolderTest::new().await;
let app = test.app.clone();
let new_name = "😁 hell world".to_owned();
assert_ne!(app.name, new_name);
test.run_scripts(vec![
UpdateApp {
name: Some(new_name.clone()),
desc: None,
},
ReadApp(app.id),
test
.run_scripts(vec![
UpdateApp {
name: Some(new_name.clone()),
desc: None,
},
ReadApp(app.id),
])
.await;
assert_eq!(test.app.name, new_name);
assert_eq!(test.app.name, new_name);
}
#[tokio::test]
async fn app_create_with_view() {
let mut test = FolderTest::new().await;
let mut app = test.app.clone();
test.run_scripts(vec![
CreateView {
name: "View A".to_owned(),
desc: "View A description".to_owned(),
data_type: ViewDataFormatPB::DeltaFormat,
},
CreateView {
name: "Grid".to_owned(),
desc: "Grid description".to_owned(),
data_type: ViewDataFormatPB::DatabaseFormat,
},
ReadApp(app.id),
let mut test = FolderTest::new().await;
let mut app = test.app.clone();
test
.run_scripts(vec![
CreateView {
name: "View A".to_owned(),
desc: "View A description".to_owned(),
data_type: ViewDataFormatPB::DeltaFormat,
},
CreateView {
name: "Grid".to_owned(),
desc: "Grid description".to_owned(),
data_type: ViewDataFormatPB::DatabaseFormat,
},
ReadApp(app.id),
])
.await;
app = test.app.clone();
assert_eq!(app.belongings.len(), 3);
assert_eq!(app.belongings[1].name, "View A");
assert_eq!(app.belongings[2].name, "Grid")
app = test.app.clone();
assert_eq!(app.belongings.len(), 3);
assert_eq!(app.belongings[1].name, "View A");
assert_eq!(app.belongings[2].name, "Grid")
}
#[tokio::test]
async fn view_update() {
let mut test = FolderTest::new().await;
let view = test.view.clone();
let new_name = "😁 123".to_owned();
assert_ne!(view.name, new_name);
let mut test = FolderTest::new().await;
let view = test.view.clone();
let new_name = "😁 123".to_owned();
assert_ne!(view.name, new_name);
test.run_scripts(vec![
UpdateView {
name: Some(new_name.clone()),
desc: None,
},
ReadView(view.id),
test
.run_scripts(vec![
UpdateView {
name: Some(new_name.clone()),
desc: None,
},
ReadView(view.id),
])
.await;
assert_eq!(test.view.name, new_name);
assert_eq!(test.view.name, new_name);
}
#[tokio::test]
#[should_panic]
async fn view_delete() {
let mut test = FolderTest::new().await;
let view = test.view.clone();
test.run_scripts(vec![DeleteView, ReadView(view.id)]).await;
let mut test = FolderTest::new().await;
let view = test.view.clone();
test.run_scripts(vec![DeleteView, ReadView(view.id)]).await;
}
#[tokio::test]
async fn view_delete_then_restore() {
let mut test = FolderTest::new().await;
let view = test.view.clone();
test.run_scripts(vec![
DeleteView,
RestoreViewFromTrash,
ReadView(view.id.clone()),
AssertView(view),
let mut test = FolderTest::new().await;
let view = test.view.clone();
test
.run_scripts(vec![
DeleteView,
RestoreViewFromTrash,
ReadView(view.id.clone()),
AssertView(view),
])
.await;
}
#[tokio::test]
async fn view_delete_all() {
let mut test = FolderTest::new().await;
let app = test.app.clone();
test.run_scripts(vec![
CreateView {
name: "View A".to_owned(),
desc: "View A description".to_owned(),
data_type: ViewDataFormatPB::DeltaFormat,
},
CreateView {
name: "Grid".to_owned(),
desc: "Grid description".to_owned(),
data_type: ViewDataFormatPB::DatabaseFormat,
},
ReadApp(app.id.clone()),
let mut test = FolderTest::new().await;
let app = test.app.clone();
test
.run_scripts(vec![
CreateView {
name: "View A".to_owned(),
desc: "View A description".to_owned(),
data_type: ViewDataFormatPB::DeltaFormat,
},
CreateView {
name: "Grid".to_owned(),
desc: "Grid description".to_owned(),
data_type: ViewDataFormatPB::DatabaseFormat,
},
ReadApp(app.id.clone()),
])
.await;
assert_eq!(test.app.belongings.len(), 3);
let view_ids = test
.app
.belongings
.iter()
.map(|view| view.id.clone())
.collect::<Vec<String>>();
test.run_scripts(vec![DeleteViews(view_ids), ReadApp(app.id), ReadTrash])
.await;
assert_eq!(test.app.belongings.len(), 3);
let view_ids = test
.app
.belongings
.iter()
.map(|view| view.id.clone())
.collect::<Vec<String>>();
test
.run_scripts(vec![DeleteViews(view_ids), ReadApp(app.id), ReadTrash])
.await;
assert_eq!(test.app.belongings.len(), 0);
assert_eq!(test.trash.len(), 3);
assert_eq!(test.app.belongings.len(), 0);
assert_eq!(test.trash.len(), 3);
}
#[tokio::test]
async fn view_delete_all_permanent() {
let mut test = FolderTest::new().await;
let app = test.app.clone();
test.run_scripts(vec![
CreateView {
name: "View A".to_owned(),
desc: "View A description".to_owned(),
data_type: ViewDataFormatPB::DeltaFormat,
},
ReadApp(app.id.clone()),
let mut test = FolderTest::new().await;
let app = test.app.clone();
test
.run_scripts(vec![
CreateView {
name: "View A".to_owned(),
desc: "View A description".to_owned(),
data_type: ViewDataFormatPB::DeltaFormat,
},
ReadApp(app.id.clone()),
])
.await;
let view_ids = test
.app
.belongings
.iter()
.map(|view| view.id.clone())
.collect::<Vec<String>>();
test.run_scripts(vec![DeleteViews(view_ids), ReadApp(app.id), DeleteAllTrash, ReadTrash])
.await;
let view_ids = test
.app
.belongings
.iter()
.map(|view| view.id.clone())
.collect::<Vec<String>>();
test
.run_scripts(vec![
DeleteViews(view_ids),
ReadApp(app.id),
DeleteAllTrash,
ReadTrash,
])
.await;
assert_eq!(test.app.belongings.len(), 0);
assert_eq!(test.trash.len(), 0);
assert_eq!(test.app.belongings.len(), 0);
assert_eq!(test.trash.len(), 0);
}
#[tokio::test]
async fn folder_sync_revision_state() {
let mut test = FolderTest::new().await;
test.run_scripts(vec![
AssertRevisionState {
rev_id: 1,
state: RevisionState::Sync,
},
AssertNextSyncRevId(Some(1)),
AssertRevisionState {
rev_id: 1,
state: RevisionState::Ack,
},
let mut test = FolderTest::new().await;
test
.run_scripts(vec![
AssertRevisionState {
rev_id: 1,
state: RevisionState::Sync,
},
AssertNextSyncRevId(Some(1)),
AssertRevisionState {
rev_id: 1,
state: RevisionState::Ack,
},
])
.await;
}
#[tokio::test]
async fn folder_sync_revision_seq() {
let mut test = FolderTest::new().await;
test.run_scripts(vec![
AssertRevisionState {
rev_id: 1,
state: RevisionState::Sync,
},
AssertRevisionState {
rev_id: 2,
state: RevisionState::Sync,
},
AssertNextSyncRevId(Some(1)),
AssertNextSyncRevId(Some(2)),
AssertRevisionState {
rev_id: 1,
state: RevisionState::Ack,
},
AssertRevisionState {
rev_id: 2,
state: RevisionState::Ack,
},
let mut test = FolderTest::new().await;
test
.run_scripts(vec![
AssertRevisionState {
rev_id: 1,
state: RevisionState::Sync,
},
AssertRevisionState {
rev_id: 2,
state: RevisionState::Sync,
},
AssertNextSyncRevId(Some(1)),
AssertNextSyncRevId(Some(2)),
AssertRevisionState {
rev_id: 1,
state: RevisionState::Ack,
},
AssertRevisionState {
rev_id: 2,
state: RevisionState::Ack,
},
])
.await;
}


@ -1,17 +1,17 @@
use flowy_folder::entities::view::{RepeatedViewIdPB, ViewIdPB};
use flowy_folder::entities::workspace::WorkspaceIdPB;
use flowy_folder::entities::{
app::{AppIdPB, CreateAppPayloadPB, UpdateAppPayloadPB},
trash::{RepeatedTrashPB, TrashIdPB, TrashType},
view::{CreateViewPayloadPB, UpdateViewPayloadPB},
workspace::{CreateWorkspacePayloadPB, RepeatedWorkspacePB},
ViewLayoutTypePB,
app::{AppIdPB, CreateAppPayloadPB, UpdateAppPayloadPB},
trash::{RepeatedTrashPB, TrashIdPB, TrashType},
view::{CreateViewPayloadPB, UpdateViewPayloadPB},
workspace::{CreateWorkspacePayloadPB, RepeatedWorkspacePB},
ViewLayoutTypePB,
};
use flowy_folder::entities::{
app::{AppPB, RepeatedAppPB},
trash::TrashPB,
view::{RepeatedViewPB, ViewDataFormatPB, ViewPB},
workspace::WorkspacePB,
app::{AppPB, RepeatedAppPB},
trash::TrashPB,
view::{RepeatedViewPB, ViewDataFormatPB, ViewPB},
workspace::WorkspacePB,
};
use flowy_folder::event_map::FolderEvent::*;
use flowy_folder::{errors::ErrorCode, services::folder_editor::FolderEditor};
@ -22,425 +22,449 @@ use std::{sync::Arc, time::Duration};
use tokio::time::sleep;
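// FolderScript is the vocabulary of the script-driven test harness: each variant
// is a single step (create/read/update/delete a workspace, app, or view, a trash
// operation, or a revision-sync assertion) that FolderTest::run_script executes
// against the embedded FlowySDKTest.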
pub enum FolderScript {
// Workspace
ReadAllWorkspaces,
CreateWorkspace {
name: String,
desc: String,
},
// AssertWorkspaceRevisionJson(String),
AssertWorkspace(WorkspacePB),
ReadWorkspace(Option<String>),
// Workspace
ReadAllWorkspaces,
CreateWorkspace {
name: String,
desc: String,
},
// AssertWorkspaceRevisionJson(String),
AssertWorkspace(WorkspacePB),
ReadWorkspace(Option<String>),
// App
CreateApp {
name: String,
desc: String,
},
// AssertAppRevisionJson(String),
AssertApp(AppPB),
ReadApp(String),
UpdateApp {
name: Option<String>,
desc: Option<String>,
},
DeleteApp,
// App
CreateApp {
name: String,
desc: String,
},
// AssertAppRevisionJson(String),
AssertApp(AppPB),
ReadApp(String),
UpdateApp {
name: Option<String>,
desc: Option<String>,
},
DeleteApp,
// View
CreateView {
name: String,
desc: String,
data_type: ViewDataFormatPB,
},
AssertView(ViewPB),
ReadView(String),
UpdateView {
name: Option<String>,
desc: Option<String>,
},
DeleteView,
DeleteViews(Vec<String>),
// View
CreateView {
name: String,
desc: String,
data_type: ViewDataFormatPB,
},
AssertView(ViewPB),
ReadView(String),
UpdateView {
name: Option<String>,
desc: Option<String>,
},
DeleteView,
DeleteViews(Vec<String>),
// Trash
RestoreAppFromTrash,
RestoreViewFromTrash,
ReadTrash,
DeleteAllTrash,
// Trash
RestoreAppFromTrash,
RestoreViewFromTrash,
ReadTrash,
DeleteAllTrash,
// Sync
#[allow(dead_code)]
AssertCurrentRevId(i64),
AssertNextSyncRevId(Option<i64>),
AssertRevisionState {
rev_id: i64,
state: RevisionState,
},
// Sync
#[allow(dead_code)]
AssertCurrentRevId(i64),
AssertNextSyncRevId(Option<i64>),
AssertRevisionState {
rev_id: i64,
state: RevisionState,
},
}
pub struct FolderTest {
pub sdk: FlowySDKTest,
pub all_workspace: Vec<WorkspacePB>,
pub workspace: WorkspacePB,
pub app: AppPB,
pub view: ViewPB,
pub trash: Vec<TrashPB>,
// pub folder_editor:
pub sdk: FlowySDKTest,
pub all_workspace: Vec<WorkspacePB>,
pub workspace: WorkspacePB,
pub app: AppPB,
pub view: ViewPB,
pub trash: Vec<TrashPB>,
// pub folder_editor:
}
impl FolderTest {
pub async fn new() -> Self {
let sdk = FlowySDKTest::default();
let _ = sdk.init_user().await;
let mut workspace = create_workspace(&sdk, "FolderWorkspace", "Folder test workspace").await;
let mut app = create_app(&sdk, &workspace.id, "Folder App", "Folder test app").await;
let view = create_view(
&sdk,
&app.id,
"Folder View",
"Folder test view",
ViewDataFormatPB::DeltaFormat,
ViewLayoutTypePB::Document,
)
.await;
app.belongings = RepeatedViewPB {
items: vec![view.clone()],
pub async fn new() -> Self {
let sdk = FlowySDKTest::default();
let _ = sdk.init_user().await;
let mut workspace = create_workspace(&sdk, "FolderWorkspace", "Folder test workspace").await;
let mut app = create_app(&sdk, &workspace.id, "Folder App", "Folder test app").await;
let view = create_view(
&sdk,
&app.id,
"Folder View",
"Folder test view",
ViewDataFormatPB::DeltaFormat,
ViewLayoutTypePB::Document,
)
.await;
app.belongings = RepeatedViewPB {
items: vec![view.clone()],
};
workspace.apps = RepeatedAppPB {
items: vec![app.clone()],
};
Self {
sdk,
all_workspace: vec![],
workspace,
app,
view,
trash: vec![],
}
}
pub async fn run_scripts(&mut self, scripts: Vec<FolderScript>) {
for script in scripts {
self.run_script(script).await;
}
}
pub async fn run_script(&mut self, script: FolderScript) {
let sdk = &self.sdk;
let folder_editor: Arc<FolderEditor> = sdk.folder_manager.folder_editor().await;
let rev_manager = folder_editor.rev_manager();
let cache = rev_manager.revision_cache().await;
match script {
FolderScript::ReadAllWorkspaces => {
let all_workspace = read_workspace(sdk, None).await;
self.all_workspace = all_workspace;
},
FolderScript::CreateWorkspace { name, desc } => {
let workspace = create_workspace(sdk, &name, &desc).await;
self.workspace = workspace;
},
// FolderScript::AssertWorkspaceRevisionJson(expected_json) => {
// let workspace = read_workspace(sdk, Some(self.workspace.id.clone()))
// .await
// .pop()
// .unwrap();
// let workspace_revision: WorkspaceRevision = workspace.into();
// let json = serde_json::to_string(&workspace_revision).unwrap();
// assert_eq!(json, expected_json);
// }
FolderScript::AssertWorkspace(workspace) => {
assert_eq!(self.workspace, workspace, "Workspace not equal");
},
FolderScript::ReadWorkspace(workspace_id) => {
let workspace = read_workspace(sdk, workspace_id).await.pop().unwrap();
self.workspace = workspace;
},
FolderScript::CreateApp { name, desc } => {
let app = create_app(sdk, &self.workspace.id, &name, &desc).await;
self.app = app;
},
// FolderScript::AssertAppRevisionJson(expected_json) => {
// let app_revision: AppRevision = self.app.clone().into();
// let json = serde_json::to_string(&app_revision).unwrap();
// assert_eq!(json, expected_json);
// }
FolderScript::AssertApp(app) => {
assert_eq!(self.app, app, "App not equal");
},
FolderScript::ReadApp(app_id) => {
let app = read_app(sdk, &app_id).await;
self.app = app;
},
FolderScript::UpdateApp { name, desc } => {
update_app(sdk, &self.app.id, name, desc).await;
},
FolderScript::DeleteApp => {
delete_app(sdk, &self.app.id).await;
},
FolderScript::CreateView {
name,
desc,
data_type,
} => {
let layout = match data_type {
ViewDataFormatPB::DeltaFormat => ViewLayoutTypePB::Document,
ViewDataFormatPB::NodeFormat => ViewLayoutTypePB::Document,
ViewDataFormatPB::DatabaseFormat => ViewLayoutTypePB::Grid,
};
workspace.apps = RepeatedAppPB {
items: vec![app.clone()],
};
Self {
sdk,
all_workspace: vec![],
workspace,
app,
view,
trash: vec![],
let view = create_view(sdk, &self.app.id, &name, &desc, data_type, layout).await;
self.view = view;
},
FolderScript::AssertView(view) => {
assert_eq!(self.view, view, "View not equal");
},
FolderScript::ReadView(view_id) => {
let view = read_view(sdk, &view_id).await;
self.view = view;
},
FolderScript::UpdateView { name, desc } => {
update_view(sdk, &self.view.id, name, desc).await;
},
FolderScript::DeleteView => {
delete_view(sdk, vec![self.view.id.clone()]).await;
},
FolderScript::DeleteViews(view_ids) => {
delete_view(sdk, view_ids).await;
},
FolderScript::RestoreAppFromTrash => {
restore_app_from_trash(sdk, &self.app.id).await;
},
FolderScript::RestoreViewFromTrash => {
restore_view_from_trash(sdk, &self.view.id).await;
},
FolderScript::ReadTrash => {
let mut trash = read_trash(sdk).await;
self.trash = trash.into_inner();
},
FolderScript::DeleteAllTrash => {
delete_all_trash(sdk).await;
self.trash = vec![];
},
FolderScript::AssertRevisionState { rev_id, state } => {
let record = cache.get(rev_id).await.unwrap();
assert_eq!(record.state, state, "Revision state is not match");
if let RevisionState::Ack = state {
// There is a defer action that writes the revisions to disk, so we wait here.
// Make sure everything is written.
sleep(Duration::from_millis(2 * REVISION_WRITE_INTERVAL_IN_MILLIS)).await;
}
}
pub async fn run_scripts(&mut self, scripts: Vec<FolderScript>) {
for script in scripts {
self.run_script(script).await;
}
}
pub async fn run_script(&mut self, script: FolderScript) {
let sdk = &self.sdk;
let folder_editor: Arc<FolderEditor> = sdk.folder_manager.folder_editor().await;
let rev_manager = folder_editor.rev_manager();
let cache = rev_manager.revision_cache().await;
match script {
FolderScript::ReadAllWorkspaces => {
let all_workspace = read_workspace(sdk, None).await;
self.all_workspace = all_workspace;
}
FolderScript::CreateWorkspace { name, desc } => {
let workspace = create_workspace(sdk, &name, &desc).await;
self.workspace = workspace;
}
// FolderScript::AssertWorkspaceRevisionJson(expected_json) => {
// let workspace = read_workspace(sdk, Some(self.workspace.id.clone()))
// .await
// .pop()
// .unwrap();
// let workspace_revision: WorkspaceRevision = workspace.into();
// let json = serde_json::to_string(&workspace_revision).unwrap();
// assert_eq!(json, expected_json);
// }
FolderScript::AssertWorkspace(workspace) => {
assert_eq!(self.workspace, workspace, "Workspace not equal");
}
FolderScript::ReadWorkspace(workspace_id) => {
let workspace = read_workspace(sdk, workspace_id).await.pop().unwrap();
self.workspace = workspace;
}
FolderScript::CreateApp { name, desc } => {
let app = create_app(sdk, &self.workspace.id, &name, &desc).await;
self.app = app;
}
// FolderScript::AssertAppRevisionJson(expected_json) => {
// let app_revision: AppRevision = self.app.clone().into();
// let json = serde_json::to_string(&app_revision).unwrap();
// assert_eq!(json, expected_json);
// }
FolderScript::AssertApp(app) => {
assert_eq!(self.app, app, "App not equal");
}
FolderScript::ReadApp(app_id) => {
let app = read_app(sdk, &app_id).await;
self.app = app;
}
FolderScript::UpdateApp { name, desc } => {
update_app(sdk, &self.app.id, name, desc).await;
}
FolderScript::DeleteApp => {
delete_app(sdk, &self.app.id).await;
}
FolderScript::CreateView { name, desc, data_type } => {
let layout = match data_type {
ViewDataFormatPB::DeltaFormat => ViewLayoutTypePB::Document,
ViewDataFormatPB::NodeFormat => ViewLayoutTypePB::Document,
ViewDataFormatPB::DatabaseFormat => ViewLayoutTypePB::Grid,
};
let view = create_view(sdk, &self.app.id, &name, &desc, data_type, layout).await;
self.view = view;
}
FolderScript::AssertView(view) => {
assert_eq!(self.view, view, "View not equal");
}
FolderScript::ReadView(view_id) => {
let view = read_view(sdk, &view_id).await;
self.view = view;
}
FolderScript::UpdateView { name, desc } => {
update_view(sdk, &self.view.id, name, desc).await;
}
FolderScript::DeleteView => {
delete_view(sdk, vec![self.view.id.clone()]).await;
}
FolderScript::DeleteViews(view_ids) => {
delete_view(sdk, view_ids).await;
}
FolderScript::RestoreAppFromTrash => {
restore_app_from_trash(sdk, &self.app.id).await;
}
FolderScript::RestoreViewFromTrash => {
restore_view_from_trash(sdk, &self.view.id).await;
}
FolderScript::ReadTrash => {
let mut trash = read_trash(sdk).await;
self.trash = trash.into_inner();
}
FolderScript::DeleteAllTrash => {
delete_all_trash(sdk).await;
self.trash = vec![];
}
FolderScript::AssertRevisionState { rev_id, state } => {
let record = cache.get(rev_id).await.unwrap();
assert_eq!(record.state, state, "Revision state is not match");
if let RevisionState::Ack = state {
// There is a defer action that writes the revisions to disk, so we wait here.
// Make sure everything is written.
sleep(Duration::from_millis(2 * REVISION_WRITE_INTERVAL_IN_MILLIS)).await;
}
}
FolderScript::AssertCurrentRevId(rev_id) => {
assert_eq!(rev_manager.rev_id(), rev_id, "Current rev_id is not match");
}
FolderScript::AssertNextSyncRevId(rev_id) => {
let next_revision = rev_manager.next_sync_revision().await.unwrap();
if rev_id.is_none() {
assert!(next_revision.is_none(), "Next revision should be None");
return;
}
let next_revision = next_revision
.unwrap_or_else(|| panic!("Expected Next revision is {}, but receive None", rev_id.unwrap()));
let mut notify = rev_manager.ack_notify();
let _ = notify.recv().await;
assert_eq!(next_revision.rev_id, rev_id.unwrap(), "Revision id not match");
}
},
FolderScript::AssertCurrentRevId(rev_id) => {
assert_eq!(rev_manager.rev_id(), rev_id, "Current rev_id is not match");
},
FolderScript::AssertNextSyncRevId(rev_id) => {
let next_revision = rev_manager.next_sync_revision().await.unwrap();
if rev_id.is_none() {
assert!(next_revision.is_none(), "Next revision should be None");
return;
}
let next_revision = next_revision.unwrap_or_else(|| {
panic!(
"Expected Next revision is {}, but receive None",
rev_id.unwrap()
)
});
let mut notify = rev_manager.ack_notify();
let _ = notify.recv().await;
assert_eq!(
next_revision.rev_id,
rev_id.unwrap(),
"Revision id not match"
);
},
}
}
}
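/// Workspace names that should fail validation: an empty name and a name that exceeds the maximum length.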
pub fn invalid_workspace_name_test_case() -> Vec<(String, ErrorCode)> {
vec![
("".to_owned(), ErrorCode::WorkspaceNameInvalid),
("1234".repeat(100), ErrorCode::WorkspaceNameTooLong),
]
}
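/// Creates a workspace through the CreateWorkspace event and returns the resulting WorkspacePB.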
pub async fn create_workspace(sdk: &FlowySDKTest, name: &str, desc: &str) -> WorkspacePB {
let request = CreateWorkspacePayloadPB {
name: name.to_owned(),
desc: desc.to_owned(),
};
FolderEventBuilder::new(sdk.clone())
.event(CreateWorkspace)
.payload(request)
.async_send()
.await
.parse::<WorkspacePB>()
}
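/// Reads workspaces through the ReadWorkspaces event. When a workspace id is supplied, the result is filtered down to that single workspace.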
pub async fn read_workspace(sdk: &FlowySDKTest, workspace_id: Option<String>) -> Vec<WorkspacePB> {
let request = WorkspaceIdPB {
value: workspace_id,
};
let mut repeated_workspace = FolderEventBuilder::new(sdk.clone())
.event(ReadWorkspaces)
.payload(request.clone())
.async_send()
.await
.parse::<RepeatedWorkspacePB>();
let workspaces;
if let Some(workspace_id) = &request.value {
workspaces = repeated_workspace
.into_inner()
.into_iter()
.filter(|workspace| &workspace.id == workspace_id)
.collect::<Vec<WorkspacePB>>();
debug_assert_eq!(workspaces.len(), 1);
} else {
workspaces = repeated_workspace.items;
}
workspaces
}
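/// Creates an app inside the given workspace through the CreateApp event and returns the resulting AppPB.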
pub async fn create_app(sdk: &FlowySDKTest, workspace_id: &str, name: &str, desc: &str) -> AppPB {
let create_app_request = CreateAppPayloadPB {
workspace_id: workspace_id.to_owned(),
name: name.to_string(),
desc: desc.to_string(),
color_style: Default::default(),
};
FolderEventBuilder::new(sdk.clone())
.event(CreateApp)
.payload(create_app_request)
.async_send()
.await
.parse::<AppPB>()
}
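/// Reads a single app by id through the ReadApp event.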
pub async fn read_app(sdk: &FlowySDKTest, app_id: &str) -> AppPB {
let request = AppIdPB {
value: app_id.to_owned(),
};
FolderEventBuilder::new(sdk.clone())
.event(ReadApp)
.payload(request)
.async_send()
.await
.parse::<AppPB>()
}
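/// Updates an app's name and/or description through the UpdateApp event.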
pub async fn update_app(
sdk: &FlowySDKTest,
app_id: &str,
name: Option<String>,
desc: Option<String>,
) {
let request = UpdateAppPayloadPB {
app_id: app_id.to_string(),
name,
desc,
color_style: None,
is_trash: None,
};
FolderEventBuilder::new(sdk.clone())
.event(UpdateApp)
.payload(request)
.async_send()
.await;
}
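/// Deletes the app with the given id through the DeleteApp event.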
pub async fn delete_app(sdk: &FlowySDKTest, app_id: &str) {
let request = AppIdPB {
value: app_id.to_string(),
};
FolderEventBuilder::new(sdk.clone())
.event(DeleteApp)
.payload(request)
.async_send()
.await;
}
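/// Creates a view under the given app through the CreateView event and returns the resulting ViewPB.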
pub async fn create_view(
sdk: &FlowySDKTest,
app_id: &str,
name: &str,
desc: &str,
data_type: ViewDataFormatPB,
layout: ViewLayoutTypePB,
sdk: &FlowySDKTest,
app_id: &str,
name: &str,
desc: &str,
data_type: ViewDataFormatPB,
layout: ViewLayoutTypePB,
) -> ViewPB {
let request = CreateViewPayloadPB {
belong_to_id: app_id.to_string(),
name: name.to_string(),
desc: desc.to_string(),
thumbnail: None,
data_format: data_type,
layout,
initial_data: vec![],
};
FolderEventBuilder::new(sdk.clone())
.event(CreateView)
.payload(request)
.async_send()
.await
.parse::<ViewPB>()
}
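/// Reads a single view by id through the ReadView event.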
pub async fn read_view(sdk: &FlowySDKTest, view_id: &str) -> ViewPB {
let view_id: ViewIdPB = view_id.into();
FolderEventBuilder::new(sdk.clone())
.event(ReadView)
.payload(view_id)
.async_send()
.await
.parse::<ViewPB>()
}
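/// Updates a view's name and/or description through the UpdateView event.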
pub async fn update_view(
sdk: &FlowySDKTest,
view_id: &str,
name: Option<String>,
desc: Option<String>,
) {
let request = UpdateViewPayloadPB {
view_id: view_id.to_string(),
name,
desc,
thumbnail: None,
};
FolderEventBuilder::new(sdk.clone())
.event(UpdateView)
.payload(request)
.async_send()
.await;
}
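/// Deletes the given views through the DeleteView event; deleted views can later be restored with restore_view_from_trash.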
pub async fn delete_view(sdk: &FlowySDKTest, view_ids: Vec<String>) {
let request = RepeatedViewIdPB { items: view_ids };
FolderEventBuilder::new(sdk.clone())
.event(DeleteView)
.payload(request)
.async_send()
.await;
}
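/// Reads the current trash contents through the ReadTrash event.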
pub async fn read_trash(sdk: &FlowySDKTest) -> RepeatedTrashPB {
FolderEventBuilder::new(sdk.clone())
.event(ReadTrash)
.async_send()
.await
.parse::<RepeatedTrashPB>()
}
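/// Restores a trashed app through the PutbackTrash event.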
pub async fn restore_app_from_trash(sdk: &FlowySDKTest, app_id: &str) {
let id = TrashIdPB {
id: app_id.to_owned(),
ty: TrashType::TrashApp,
};
FolderEventBuilder::new(sdk.clone())
.event(PutbackTrash)
.payload(id)
.async_send()
.await;
}
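/// Restores a trashed view through the PutbackTrash event.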
pub async fn restore_view_from_trash(sdk: &FlowySDKTest, view_id: &str) {
let id = TrashIdPB {
id: view_id.to_owned(),
ty: TrashType::TrashView,
};
FolderEventBuilder::new(sdk.clone())
.event(PutbackTrash)
.payload(id)
.async_send()
.await;
}
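/// Clears all trash through the DeleteAllTrash event.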
pub async fn delete_all_trash(sdk: &FlowySDKTest) {
FolderEventBuilder::new(sdk.clone())
.event(DeleteAllTrash)
.async_send()
.await;
}