Mirror of https://github.com/AppFlowy-IO/AppFlowy.git (synced 2024-08-30 18:12:39 +00:00)
feat: add icon field (#4824)
* feat: add icon field
* fix: add sqlx migration files
* chore: fix test
* chore: fix duplicate event name
* chore: update to latest stable rust toolchain
* chore: use 1.75 channel
* chore: fix duplicate event name
* chore: fix duplicate event name
* chore: use more reliable assertion

---------

Co-authored-by: nathan <nathan@appflowy.io>
This commit is contained in: parent af16299c83, commit bf70be1841
@@ -109,7 +109,7 @@ class UserBackendService {
     final request = CreateWorkspacePayloadPB.create()
       ..name = name
       ..desc = desc;
-    return FolderEventCreateWorkspace(request).send().then((result) {
+    return FolderEventCreateFolderWorkspace(request).send().then((result) {
       return result.fold(
         (workspace) => FlowyResult.success(workspace),
         (error) => FlowyResult.failure(error),
@@ -63,19 +63,11 @@ impl EventBuilder {
     match response.clone().parse::<R, FlowyError>() {
       Ok(Ok(data)) => data,
       Ok(Err(e)) => {
-        panic!(
-          "Parser {:?} failed: {:?}, response {:?}",
-          std::any::type_name::<R>(),
-          e,
-          response
-        )
+        panic!("Parser {:?} failed: {:?}", std::any::type_name::<R>(), e)
       },
-      Err(e) => {
-        panic!("Parser {:?} failed: {:?}", std::any::type_name::<R>(), e)
-      },
+      Err(e) => panic!(
+        "Dispatch {:?} failed: {:?}, response {:?}",
+        std::any::type_name::<R>(),
+        e,
+        response
+      ),
     }
   }
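A note on the three match arms above (illustration only, not part of the commit): `parse::<R, FlowyError>()` evidently returns a nested `Result`, where the outer layer reports a dispatch failure and the inner layer a payload-parsing failure. A minimal standalone sketch of the same shape, using a hypothetical `parse_response` and byte-decoding logic rather than AppFlowy's types:

  use std::convert::TryInto;

  // Outer Result: did the dispatch succeed? Inner Result: did the payload parse?
  fn parse_response(raw: Result<Vec<u8>, String>) -> Result<Result<u32, String>, String> {
    raw.map(|bytes| {
      bytes
        .try_into()
        .map(u32::from_le_bytes)
        .map_err(|_| "payload was not 4 bytes".to_string())
    })
  }

  fn main() {
    match parse_response(Ok(vec![1, 0, 0, 0])) {
      Ok(Ok(value)) => println!("parsed {value}"),
      Ok(Err(parse_err)) => println!("parser failed: {parse_err}"),
      Err(dispatch_err) => println!("dispatch failed: {dispatch_err}"),
    }
  }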
@@ -210,7 +210,7 @@ async fn create_workspace(sdk: &EventIntegrationTest, name: &str, desc: &str) ->
   };

   EventBuilder::new(sdk.clone())
-    .event(CreateWorkspace)
+    .event(CreateFolderWorkspace)
     .payload(request)
     .async_send()
     .await
@@ -210,7 +210,7 @@ pub async fn create_workspace(sdk: &EventIntegrationTest, name: &str, desc: &str
   };

   EventBuilder::new(sdk.clone())
-    .event(CreateWorkspace)
+    .event(CreateFolderWorkspace)
     .payload(request)
     .async_send()
     .await
@@ -12,7 +12,7 @@ async fn create_workspace_event_test() {
     desc: "".to_owned(),
   };
   let view_pb = EventBuilder::new(test)
-    .event(flowy_folder::event_map::FolderEvent::CreateWorkspace)
+    .event(flowy_folder::event_map::FolderEvent::CreateFolderWorkspace)
     .payload(request)
     .async_send()
     .await
@@ -474,7 +474,7 @@ async fn create_parent_view_with_invalid_name() {
   };
   assert_eq!(
     EventBuilder::new(sdk)
-      .event(flowy_folder::event_map::FolderEvent::CreateWorkspace)
+      .event(flowy_folder::event_map::FolderEvent::CreateFolderWorkspace)
       .payload(request)
       .async_send()
      .await
@@ -58,7 +58,10 @@ async fn af_cloud_create_workspace_test() {

   let workspaces = get_synced_workspaces(&test, user_profile_pb.id).await;
   assert_eq!(workspaces.len(), 2);
-  assert_eq!(workspaces[1].name, "my second workspace".to_string());
+  let _second_workspace = workspaces
+    .iter()
+    .find(|w| w.name == "my second workspace")
+    .expect("created workspace not found");

   {
     // before opening new workspace
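For context on the "more reliable assertion" mentioned in the commit message (illustration only, not part of the commit): looking the workspace up by name instead of by index keeps the test independent of the order in which workspaces are returned. A minimal sketch with a hypothetical `Workspace` struct:

  #[derive(Debug)]
  struct Workspace {
    name: String,
  }

  fn main() {
    // Hypothetical list; the real test gets this from the backend.
    let workspaces = vec![
      Workspace { name: "general".to_string() },
      Workspace { name: "my second workspace".to_string() },
    ];

    // Order-independent: passes wherever the workspace sits in the list.
    let second = workspaces
      .iter()
      .find(|w| w.name == "my second workspace")
      .expect("created workspace not found");
    assert_eq!(second.name, "my second workspace");
  }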
@@ -11,7 +11,7 @@ use crate::manager::FolderManager;
 pub fn init(folder: Weak<FolderManager>) -> AFPlugin {
   AFPlugin::new().name("Flowy-Folder").state(folder)
     // Workspace
-    .event(FolderEvent::CreateWorkspace, create_workspace_handler)
+    .event(FolderEvent::CreateFolderWorkspace, create_workspace_handler)
     .event(FolderEvent::GetCurrentWorkspaceSetting, read_current_workspace_setting_handler)
     .event(FolderEvent::ReadCurrentWorkspace, read_current_workspace_handler)
     .event(FolderEvent::ReadWorkspaceViews, get_workspace_views_handler)
@@ -45,7 +45,7 @@ pub fn init(folder: Weak<FolderManager>) -> AFPlugin {
 pub enum FolderEvent {
   /// Create a new workspace
   #[event(input = "CreateWorkspacePayloadPB", output = "WorkspacePB")]
-  CreateWorkspace = 0,
+  CreateFolderWorkspace = 0,

   /// Read the current opening workspace. Currently, we only support one workspace
   #[event(output = "WorkspaceSettingPB")]
@@ -409,6 +409,7 @@ fn to_user_workspace(af_workspace: AFWorkspace) -> UserWorkspace {
     name: af_workspace.workspace_name,
     created_at: af_workspace.created_at,
     workspace_database_object_id: af_workspace.database_storage_id.to_string(),
+    icon: af_workspace.icon,
   }
 }

@@ -214,5 +214,6 @@ fn make_user_workspace() -> UserWorkspace {
     name: "My Workspace".to_string(),
     created_at: Default::default(),
     workspace_database_object_id: uuid::Uuid::new_v4().to_string(),
+    icon: "".to_string(),
   }
 }
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+ALTER TABLE user_workspace_table DROP COLUMN icon TEXT;
@@ -0,0 +1,2 @@
+-- Your SQL goes here
+ALTER TABLE user_workspace_table ADD COLUMN icon TEXT NOT NULL DEFAULT '';
@@ -43,6 +43,7 @@ diesel::table! {
     uid -> BigInt,
     created_at -> BigInt,
     database_storage_id -> Text,
+    icon -> Text,
   }
 }

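A sketch of why the column is appended last (illustration only, not part of the commit, assuming diesel 2.x and a hypothetical table): diesel's #[derive(Queryable)] maps struct fields to table! columns by position, so a new column and its struct field both go at the end:

  use diesel::prelude::*;

  diesel::table! {
    // Hypothetical table, standing in for user_workspace_table.
    demo_workspace_table (id) {
      id -> Text,
      created_at -> BigInt,
      icon -> Text, // new column appended last
    }
  }

  // Queryable matches fields to the columns above by position,
  // so the new field must also sit last in the struct.
  #[derive(Queryable)]
  #[allow(dead_code)]
  struct DemoWorkspaceRow {
    id: String,
    created_at: i64,
    icon: String,
  }

  fn main() {
    // Compile-time sketch only; an actual query would need a database connection.
  }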
@@ -140,6 +140,8 @@ pub struct UserWorkspace {
   /// The database storage id is used indexing all the database views in current workspace.
   #[serde(rename = "database_storage_id")]
   pub workspace_database_object_id: String,
+  #[serde(default)]
+  pub icon: String,
 }

 impl UserWorkspace {
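A sketch of what #[serde(default)] buys here (illustration only, not part of the commit): workspace data persisted before the icon field existed can still deserialize, with icon falling back to an empty string. A trimmed-down standalone example:

  use serde::Deserialize;

  #[derive(Debug, Deserialize)]
  struct WorkspaceLike {
    name: String,
    // Missing in data written before this change; falls back to String::default(), i.e. "".
    #[serde(default)]
    icon: String,
  }

  fn main() {
    // A payload persisted before the icon field existed.
    let old_json = r#"{ "name": "My Workspace" }"#;
    let w: WorkspaceLike = serde_json::from_str(old_json).unwrap();
    assert_eq!(w.icon, "");
    println!("{w:?}");
  }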
@@ -149,6 +151,7 @@ impl UserWorkspace {
       name: "".to_string(),
       created_at: Utc::now(),
       workspace_database_object_id: Uuid::new_v4().to_string(),
+      icon: "".to_string(),
     }
   }
 }
@@ -63,6 +63,7 @@ impl<'de> Visitor<'de> for SessionVisitor {
       created_at: Utc::now(),
       // For historical reasons, the database_storage_id is constructed by the user_id.
       workspace_database_object_id: STANDARD.encode(format!("{}:user:database", user_id)),
+      icon: "".to_owned(),
     })
   }
 }
@@ -228,6 +228,9 @@ pub struct UserWorkspacePB {

   #[pb(index = 3)]
   pub created_at_timestamp: i64,
+
+  #[pb(index = 4)]
+  pub icon: String,
 }

 impl From<UserWorkspace> for UserWorkspacePB {
@@ -236,6 +239,7 @@ impl From<UserWorkspace> for UserWorkspacePB {
       workspace_id: value.id,
       name: value.name,
       created_at_timestamp: value.created_at.timestamp(),
+      icon: value.icon,
     }
   }
 }
@@ -15,6 +15,7 @@ pub struct UserWorkspaceTable {
   pub uid: i64,
   pub created_at: i64,
   pub database_storage_id: String,
+  pub icon: String,
 }

 pub fn get_user_workspace_op(workspace_id: &str, mut conn: DBConnection) -> Option<UserWorkspace> {
@@ -90,6 +91,7 @@ impl TryFrom<(i64, &UserWorkspace)> for UserWorkspaceTable {
       uid: value.0,
       created_at: value.1.created_at.timestamp(),
       database_storage_id: value.1.workspace_database_object_id.clone(),
+      icon: value.1.icon.clone(),
     })
   }
 }
@@ -104,6 +106,7 @@ impl From<UserWorkspaceTable> for UserWorkspace {
         .single()
         .unwrap_or_default(),
       workspace_database_object_id: value.database_storage_id,
+      icon: value.icon,
     }
   }
 }
@@ -11,7 +11,7 @@ use crate::module::AFPluginStateMap;
 use crate::runtime::AFPluginRuntime;
 use crate::{
   errors::{DispatchError, Error, InternalError},
-  module::{as_plugin_map, AFPlugin, AFPluginMap, AFPluginRequest},
+  module::{plugin_map_or_crash, AFPlugin, AFPluginMap, AFPluginRequest},
   response::AFPluginEventResponse,
   service::{AFPluginServiceFactory, Service},
 };
@@ -87,7 +87,7 @@ impl AFPluginDispatcher {
   pub fn new(runtime: Arc<AFPluginRuntime>, plugins: Vec<AFPlugin>) -> AFPluginDispatcher {
     tracing::trace!("{}", plugin_info(&plugins));
     AFPluginDispatcher {
-      plugins: as_plugin_map(plugins),
+      plugins: plugin_map_or_crash(plugins),
       runtime,
     }
   }
@@ -27,12 +27,16 @@ use crate::{
 };

 pub type AFPluginMap = Arc<HashMap<AFPluginEvent, Arc<AFPlugin>>>;
-pub(crate) fn as_plugin_map(plugins: Vec<AFPlugin>) -> AFPluginMap {
-  let mut plugin_map = HashMap::new();
+pub(crate) fn plugin_map_or_crash(plugins: Vec<AFPlugin>) -> AFPluginMap {
+  let mut plugin_map: HashMap<AFPluginEvent, Arc<AFPlugin>> = HashMap::new();
   plugins.into_iter().for_each(|m| {
     let events = m.events();
     let plugins = Arc::new(m);
     events.into_iter().for_each(|e| {
+      if plugin_map.contains_key(&e) {
+        let plugin_name = plugin_map.get(&e).and_then(|p| Some(&p.name));
+        panic!("⚠️⚠️⚠️Error: {:?} is already defined in {:?}", &e, plugin_name,);
+      }
       plugin_map.insert(e, plugins.clone());
     });
   });
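A simplified sketch of the duplicate-event check above (illustration only, not part of the commit): if two plugins declare events that stringify to the same name, the second registration would silently shadow the first, so the map now panics instead. Plain HashMap stand-in, hypothetical plugin names:

  use std::collections::HashMap;

  fn register(map: &mut HashMap<String, &'static str>, event: &str, plugin: &'static str) {
    if map.contains_key(event) {
      // Fail fast instead of silently overwriting the first plugin's handler.
      panic!("event {:?} is already defined in {:?}", event, map[event]);
    }
    map.insert(event.to_string(), plugin);
  }

  fn main() {
    let mut map = HashMap::new();
    register(&mut map, "CreateWorkspace", "Flowy-User"); // hypothetical owner of the name
    // A second module declaring the same event name would now panic at startup,
    // which is presumably why the folder event was renamed:
    // register(&mut map, "CreateWorkspace", "Flowy-Folder"); // would panic
    register(&mut map, "CreateFolderWorkspace", "Flowy-Folder");
  }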
@@ -40,7 +44,7 @@ pub(crate) fn as_plugin_map(plugins: Vec<AFPlugin>) -> AFPluginMap {
 }

 #[derive(PartialEq, Eq, Hash, Debug, Clone)]
-pub struct AFPluginEvent(pub String);
+pub struct AFPluginEvent(String);

 impl<T: Display + Eq + Hash + Debug + Clone> std::convert::From<T> for AFPluginEvent {
   fn from(t: T) -> Self {