mirror of
https://github.com/AppFlowy-IO/AppFlowy.git
synced 2024-08-30 18:12:39 +00:00
fix clippy warnings
This commit is contained in:
parent
8d9cde17a9
commit
3abd5b953e
@ -1,6 +1,6 @@
|
||||
use flowy_document_infra::protobuf::Doc;
|
||||
|
||||
pub(crate) const DOC_TABLE: &'static str = "doc_table";
|
||||
pub(crate) const DOC_TABLE: &str = "doc_table";
|
||||
|
||||
#[derive(Debug, Clone, sqlx::FromRow)]
|
||||
pub struct DocTable {
|
||||
|
@ -2,10 +2,10 @@ use chrono::Utc;
|
||||
use flowy_workspace_infra::protobuf::{App, RepeatedView, Trash, TrashType, View, ViewType, Workspace};
|
||||
use protobuf::ProtobufEnum;
|
||||
|
||||
pub(crate) const WORKSPACE_TABLE: &'static str = "workspace_table";
|
||||
pub(crate) const APP_TABLE: &'static str = "app_table";
|
||||
pub(crate) const VIEW_TABLE: &'static str = "view_table";
|
||||
pub(crate) const TRASH_TABLE: &'static str = "trash_table";
|
||||
pub(crate) const WORKSPACE_TABLE: &str = "workspace_table";
|
||||
pub(crate) const APP_TABLE: &str = "app_table";
|
||||
pub(crate) const VIEW_TABLE: &str = "view_table";
|
||||
pub(crate) const TRASH_TABLE: &str = "trash_table";
|
||||
|
||||
#[derive(Debug, Clone, sqlx::FromRow)]
|
||||
pub struct WorkspaceTable {
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
pub mod app;
|
||||
pub mod router;
|
||||
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
pub(crate) use crud::*;
|
||||
pub use router::*;
|
||||
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
pub mod router;
|
||||
mod trash;
|
||||
|
||||
|
@ -8,7 +8,7 @@ use backend_service::{
|
||||
errors::{invalid_params, ErrorCode, ServerError},
|
||||
response::FlowyResponse,
|
||||
};
|
||||
use chrono::Utc;
|
||||
|
||||
use flowy_user_infra::{
|
||||
parser::{UserEmail, UserName, UserPassword},
|
||||
protobuf::{SignInParams, SignInResponse, SignUpParams, SignUpResponse, UpdateUserParams, UserProfile},
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
pub mod router;
|
||||
pub mod sql_builder;
|
||||
mod view;
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
pub mod router;
|
||||
pub mod sql_builder;
|
||||
mod workspace;
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
mod c;
|
||||
mod model;
|
||||
mod protobuf;
|
||||
@ -28,7 +29,7 @@ pub extern "C" fn init_sdk(path: *mut c_char) -> i64 {
|
||||
let config = FlowySDKConfig::new(path, server_config, "appflowy").log_filter("debug");
|
||||
*FLOWY_SDK.write() = Some(Arc::new(FlowySDK::new(config)));
|
||||
|
||||
return 1;
|
||||
0
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
@ -62,7 +63,7 @@ pub extern "C" fn sync_command(input: *const u8, len: usize) -> *const u8 {
|
||||
#[no_mangle]
|
||||
pub extern "C" fn set_stream_port(port: i64) -> i32 {
|
||||
dart_notify::dart::DartStreamSender::set_port(port);
|
||||
return 0;
|
||||
0
|
||||
}
|
||||
|
||||
#[inline(never)]
|
||||
|
@ -21,6 +21,6 @@ impl FFIRequest {
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::Into<ModuleRequest> for FFIRequest {
|
||||
fn into(self) -> ModuleRequest { ModuleRequest::new(self.event).payload(self.payload) }
|
||||
impl std::convert::From<FFIRequest> for ModuleRequest {
|
||||
fn from(ffi_request: FFIRequest) -> Self { ModuleRequest::new(ffi_request.event).payload(ffi_request.payload) }
|
||||
}
|
||||
|
@ -54,15 +54,9 @@ impl DartNotifyBuilder {
|
||||
}
|
||||
|
||||
pub fn send(self) {
|
||||
let payload = match self.payload {
|
||||
None => None,
|
||||
Some(bytes) => Some(bytes.to_vec()),
|
||||
};
|
||||
let payload = self.payload.map(|bytes| bytes.to_vec());
|
||||
|
||||
let error = match self.error {
|
||||
None => None,
|
||||
Some(bytes) => Some(bytes.to_vec()),
|
||||
};
|
||||
let error = self.error.map(|bytes| bytes.to_vec());
|
||||
|
||||
let subject = SubscribeObject {
|
||||
source: self.source,
|
||||
|
@ -30,7 +30,7 @@ impl FlowyDocument {
|
||||
server_config: &ServerConfig,
|
||||
) -> FlowyDocument {
|
||||
let server = construct_doc_server(server_config);
|
||||
let doc_ctrl = Arc::new(DocController::new(server.clone(), user.clone(), ws_manager.clone()));
|
||||
let doc_ctrl = Arc::new(DocController::new(server, user.clone(), ws_manager));
|
||||
Self { doc_ctrl, user }
|
||||
}
|
||||
|
||||
|
@ -1,13 +1,13 @@
|
||||
use dart_notify::DartNotifyBuilder;
|
||||
use flowy_derive::ProtoBuf_Enum;
|
||||
const OBSERVABLE_CATEGORY: &'static str = "Doc";
|
||||
const OBSERVABLE_CATEGORY: &str = "Doc";
|
||||
#[derive(ProtoBuf_Enum, Debug)]
|
||||
pub(crate) enum DocObservable {
|
||||
UserCreateDoc = 0,
|
||||
}
|
||||
|
||||
impl std::convert::Into<i32> for DocObservable {
|
||||
fn into(self) -> i32 { self as i32 }
|
||||
impl std::convert::From<DocObservable> for i32 {
|
||||
fn from(o: DocObservable) -> Self { o as i32 }
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
|
@ -28,13 +28,12 @@ pub(crate) struct DocController {
|
||||
impl DocController {
|
||||
pub(crate) fn new(server: Server, user: Arc<dyn DocumentUser>, ws: Arc<WsDocumentManager>) -> Self {
|
||||
let cache = Arc::new(DocCache::new());
|
||||
let controller = Self {
|
||||
Self {
|
||||
server,
|
||||
user,
|
||||
ws_manager: ws,
|
||||
cache: cache.clone(),
|
||||
};
|
||||
controller
|
||||
cache,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn init(&self) -> DocResult<()> {
|
||||
@ -47,7 +46,7 @@ impl DocController {
|
||||
params: DocIdentifier,
|
||||
pool: Arc<ConnectionPool>,
|
||||
) -> Result<Arc<ClientEditDoc>, DocError> {
|
||||
if self.cache.contains(¶ms.doc_id) == false {
|
||||
if !self.cache.contains(¶ms.doc_id) {
|
||||
let edit_ctx = self.make_edit_context(¶ms.doc_id, pool.clone()).await?;
|
||||
return Ok(edit_ctx);
|
||||
}
|
||||
|
@ -65,7 +65,7 @@ impl RevisionManager {
|
||||
pub fn update_rev_id_counter_value(&self, rev_id: i64) { self.rev_id_counter.set(rev_id); }
|
||||
|
||||
pub async fn mk_revisions(&self, range: RevisionRange) -> Result<Revision, DocError> {
|
||||
debug_assert!(&range.doc_id == &self.doc_id);
|
||||
debug_assert!(range.doc_id == self.doc_id);
|
||||
let revisions = self.rev_store.revs_in_range(range.clone()).await?;
|
||||
let mut new_delta = Delta::new();
|
||||
for revision in revisions {
|
||||
@ -73,7 +73,7 @@ impl RevisionManager {
|
||||
Ok(delta) => {
|
||||
new_delta = new_delta.compose(&delta)?;
|
||||
},
|
||||
Err(_) => {},
|
||||
Err(e) => log::error!("{}", e),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -64,12 +64,11 @@ impl RevisionStore {
|
||||
let revs_map = self.revs_map.clone();
|
||||
let mut rx = sender.subscribe();
|
||||
tokio::spawn(async move {
|
||||
match rx.recv().await {
|
||||
Ok(rev_id) => match revs_map.get_mut(&rev_id) {
|
||||
if let Ok(rev_id) = rx.recv().await {
|
||||
match revs_map.get_mut(&rev_id) {
|
||||
None => {},
|
||||
Some(mut rev) => rev.value_mut().state = RevState::Acked,
|
||||
},
|
||||
Err(_) => {},
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@ -107,7 +106,7 @@ impl RevisionStore {
|
||||
|
||||
*self.defer_save.write().await = Some(tokio::spawn(async move {
|
||||
tokio::time::sleep(Duration::from_millis(300)).await;
|
||||
let ids = revs_map.iter().map(|kv| kv.key().clone()).collect::<Vec<i64>>();
|
||||
let ids = revs_map.iter().map(|kv| *kv.key()).collect::<Vec<i64>>();
|
||||
let revisions_state = revs_map
|
||||
.iter()
|
||||
.map(|kv| (kv.revision.clone(), kv.state))
|
||||
@ -208,7 +207,7 @@ async fn fetch_from_local(doc_id: &str, persistence: Arc<Persistence>) -> DocRes
|
||||
None => {},
|
||||
Some(op) => {
|
||||
let data = op.get_data();
|
||||
if !data.ends_with("\n") {
|
||||
if !data.ends_with('\n') {
|
||||
delta.ops.push(Operation::Insert("\n".into()))
|
||||
}
|
||||
},
|
||||
@ -232,7 +231,7 @@ fn validate_delta(doc_id: &str, persistence: Arc<Persistence>, conn: &SqliteConn
|
||||
}
|
||||
|
||||
let data = delta.ops.last().as_ref().unwrap().get_data();
|
||||
if !data.ends_with("\n") {
|
||||
if !data.ends_with('\n') {
|
||||
log::error!("The op must end with newline");
|
||||
let result = || {
|
||||
let revisions = persistence.rev_sql.read_rev_tables(&doc_id, conn)?;
|
||||
|
@ -58,15 +58,10 @@ impl WsDocumentManager {
|
||||
fn listen_ws_state_changed(ws: Arc<dyn DocumentWebSocket>, handlers: Arc<DashMap<String, Arc<dyn WsDocumentHandler>>>) {
|
||||
let mut notify = ws.state_notify();
|
||||
tokio::spawn(async move {
|
||||
loop {
|
||||
match notify.recv().await {
|
||||
Ok(state) => {
|
||||
handlers.iter().for_each(|handle| {
|
||||
handle.value().state_changed(&state);
|
||||
});
|
||||
},
|
||||
Err(_) => break,
|
||||
}
|
||||
while let Ok(state) = notify.recv().await {
|
||||
handlers.iter().for_each(|handle| {
|
||||
handle.value().state_changed(&state);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -46,23 +46,23 @@ impl RevState {
|
||||
}
|
||||
impl_sql_integer_expression!(RevState);
|
||||
|
||||
impl std::convert::Into<Revision> for RevTable {
|
||||
fn into(self) -> Revision {
|
||||
let md5 = md5(&self.data);
|
||||
impl std::convert::From<RevTable> for Revision {
|
||||
fn from(table: RevTable) -> Self {
|
||||
let md5 = md5(&table.data);
|
||||
Revision {
|
||||
base_rev_id: self.base_rev_id,
|
||||
rev_id: self.rev_id,
|
||||
delta_data: self.data,
|
||||
base_rev_id: table.base_rev_id,
|
||||
rev_id: table.rev_id,
|
||||
delta_data: table.data,
|
||||
md5,
|
||||
doc_id: self.doc_id,
|
||||
ty: self.ty.into(),
|
||||
doc_id: table.doc_id,
|
||||
ty: table.ty.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::Into<RevTableType> for RevType {
|
||||
fn into(self) -> RevTableType {
|
||||
match self {
|
||||
impl std::convert::From<RevType> for RevTableType {
|
||||
fn from(ty: RevType) -> Self {
|
||||
match ty {
|
||||
RevType::Local => RevTableType::Local,
|
||||
RevType::Remote => RevTableType::Remote,
|
||||
}
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
mod attribute_test;
|
||||
mod op_test;
|
||||
mod serde_test;
|
||||
@ -9,7 +10,7 @@ use lib_ot::core::*;
|
||||
use rand::{prelude::*, Rng as WrappedRng};
|
||||
use std::{sync::Once, time::Duration};
|
||||
|
||||
const LEVEL: &'static str = "debug";
|
||||
const LEVEL: &str = "debug";
|
||||
|
||||
#[derive(Clone, Debug, Display)]
|
||||
pub enum TestOp {
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::all)]
|
||||
use crate::editor::{Rng, TestBuilder, TestOp::*};
|
||||
use flowy_document_infra::core::{FlowyDoc, PlainDoc};
|
||||
use lib_ot::core::*;
|
||||
@ -229,8 +230,8 @@ fn delta_seek_4() {
|
||||
fn delta_seek_5() {
|
||||
let mut delta = Delta::default();
|
||||
let attributes = AttributeBuilder::new()
|
||||
.add(Attribute::Bold(true))
|
||||
.add(Attribute::Italic(true))
|
||||
.add_attr(Attribute::Bold(true))
|
||||
.add_attr(Attribute::Italic(true))
|
||||
.build();
|
||||
|
||||
delta.add(OpBuilder::insert("1234").attributes(attributes.clone()).build());
|
||||
@ -474,7 +475,7 @@ fn transform_random_delta() {
|
||||
fn transform_with_two_delta_test() {
|
||||
let mut a = Delta::default();
|
||||
let mut a_s = String::new();
|
||||
a.insert("123", AttributeBuilder::new().add(Attribute::Bold(true)).build());
|
||||
a.insert("123", AttributeBuilder::new().add_attr(Attribute::Bold(true)).build());
|
||||
a_s = a.apply(&a_s).unwrap();
|
||||
assert_eq!(&a_s, "123");
|
||||
|
||||
|
@ -4,8 +4,8 @@ use lib_ot::core::*;
|
||||
#[test]
|
||||
fn operation_insert_serialize_test() {
|
||||
let attributes = AttributeBuilder::new()
|
||||
.add(Attribute::Bold(true))
|
||||
.add(Attribute::Italic(true))
|
||||
.add_attr(Attribute::Bold(true))
|
||||
.add_attr(Attribute::Italic(true))
|
||||
.build();
|
||||
let operation = OpBuilder::insert("123").attributes(attributes).build();
|
||||
let json = serde_json::to_string(&operation).unwrap();
|
||||
@ -35,8 +35,8 @@ fn operation_delete_serialize_test() {
|
||||
#[test]
|
||||
fn attributes_serialize_test() {
|
||||
let attributes = AttributeBuilder::new()
|
||||
.add(Attribute::Bold(true))
|
||||
.add(Attribute::Italic(true))
|
||||
.add_attr(Attribute::Bold(true))
|
||||
.add_attr(Attribute::Italic(true))
|
||||
.build();
|
||||
let retain = OpBuilder::insert("123").attributes(attributes).build();
|
||||
|
||||
@ -49,8 +49,8 @@ fn delta_serialize_multi_attribute_test() {
|
||||
let mut delta = Delta::default();
|
||||
|
||||
let attributes = AttributeBuilder::new()
|
||||
.add(Attribute::Bold(true))
|
||||
.add(Attribute::Italic(true))
|
||||
.add_attr(Attribute::Bold(true))
|
||||
.add_attr(Attribute::Italic(true))
|
||||
.build();
|
||||
let retain = OpBuilder::insert("123").attributes(attributes).build();
|
||||
|
||||
|
@ -23,7 +23,7 @@ impl WorkspaceDepsResolver {
|
||||
|
||||
pub fn split_into(self) -> (Arc<dyn WorkspaceUser>, Arc<dyn WorkspaceDatabase>) {
|
||||
let user: Arc<dyn WorkspaceUser> = self.inner.clone();
|
||||
let database: Arc<dyn WorkspaceDatabase> = self.inner.clone();
|
||||
let database: Arc<dyn WorkspaceDatabase> = self.inner;
|
||||
(user, database)
|
||||
}
|
||||
}
|
||||
|
@ -43,7 +43,7 @@ impl FlowySDKConfig {
|
||||
}
|
||||
|
||||
fn crate_log_filter(level: Option<String>) -> String {
|
||||
let level = level.unwrap_or(std::env::var("RUST_LOG").unwrap_or("info".to_owned()));
|
||||
let level = level.unwrap_or_else(|| std::env::var("RUST_LOG").unwrap_or_else(|_| "info".to_owned()));
|
||||
let mut filters = vec![];
|
||||
filters.push(format!("flowy_sdk={}", level));
|
||||
filters.push(format!("flowy_workspace={}", level));
|
||||
@ -110,33 +110,30 @@ async fn _listen_user_status(
|
||||
workspace_controller: Arc<WorkspaceController>,
|
||||
) {
|
||||
loop {
|
||||
match subscribe.recv().await {
|
||||
Ok(status) => {
|
||||
let result = || async {
|
||||
match status {
|
||||
UserStatus::Login { token } => {
|
||||
let _ = workspace_controller.user_did_sign_in(&token).await?;
|
||||
},
|
||||
UserStatus::Logout { .. } => {
|
||||
workspace_controller.user_did_logout().await;
|
||||
},
|
||||
UserStatus::Expired { .. } => {
|
||||
workspace_controller.user_session_expired().await;
|
||||
},
|
||||
UserStatus::SignUp { profile, ret } => {
|
||||
let _ = workspace_controller.user_did_sign_up(&profile.token).await?;
|
||||
let _ = ret.send(());
|
||||
},
|
||||
}
|
||||
Ok::<(), WorkspaceError>(())
|
||||
};
|
||||
|
||||
match result().await {
|
||||
Ok(_) => {},
|
||||
Err(e) => log::error!("{}", e),
|
||||
if let Ok(status) = subscribe.recv().await {
|
||||
let result = || async {
|
||||
match status {
|
||||
UserStatus::Login { token } => {
|
||||
let _ = workspace_controller.user_did_sign_in(&token).await?;
|
||||
},
|
||||
UserStatus::Logout { .. } => {
|
||||
workspace_controller.user_did_logout().await;
|
||||
},
|
||||
UserStatus::Expired { .. } => {
|
||||
workspace_controller.user_session_expired().await;
|
||||
},
|
||||
UserStatus::SignUp { profile, ret } => {
|
||||
let _ = workspace_controller.user_did_sign_up(&profile.token).await?;
|
||||
let _ = ret.send(());
|
||||
},
|
||||
}
|
||||
},
|
||||
Err(_) => {},
|
||||
Ok::<(), WorkspaceError>(())
|
||||
};
|
||||
|
||||
match result().await {
|
||||
Ok(_) => {},
|
||||
Err(e) => log::error!("{}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -163,8 +160,7 @@ fn mk_workspace(
|
||||
flowy_document: Arc<FlowyDocument>,
|
||||
server_config: &ServerConfig,
|
||||
) -> Arc<WorkspaceController> {
|
||||
let workspace_deps = WorkspaceDepsResolver::new(user_session.clone());
|
||||
let workspace_deps = WorkspaceDepsResolver::new(user_session);
|
||||
let (user, database) = workspace_deps.split_into();
|
||||
let workspace_controller = flowy_workspace::module::mk_workspace(user, database, flowy_document, server_config);
|
||||
workspace_controller
|
||||
flowy_workspace::module::mk_workspace(user, database, flowy_document, server_config)
|
||||
}
|
||||
|
@ -10,15 +10,14 @@ pub fn mk_modules(workspace_controller: Arc<WorkspaceController>, user_session:
|
||||
vec![mk_user_module(user_session), mk_workspace_module(workspace_controller)]
|
||||
}
|
||||
|
||||
fn mk_user_module(user_session: Arc<UserSession>) -> Module { flowy_user::module::create(user_session.clone()) }
|
||||
fn mk_user_module(user_session: Arc<UserSession>) -> Module { flowy_user::module::create(user_session) }
|
||||
|
||||
fn mk_workspace_module(workspace_controller: Arc<WorkspaceController>) -> Module {
|
||||
flowy_workspace::module::create(workspace_controller)
|
||||
}
|
||||
|
||||
pub fn mk_document_module(user_session: Arc<UserSession>, server_config: &ServerConfig) -> Arc<FlowyDocument> {
|
||||
let document_deps = DocumentDepsResolver::new(user_session.clone());
|
||||
let document_deps = DocumentDepsResolver::new(user_session);
|
||||
let (user, ws_manager) = document_deps.split_into();
|
||||
let document = Arc::new(FlowyDocument::new(user, ws_manager, server_config));
|
||||
document
|
||||
Arc::new(FlowyDocument::new(user, ws_manager, server_config))
|
||||
}
|
||||
|
@ -16,7 +16,7 @@ use std::{fs, path::PathBuf, sync::Arc};
|
||||
|
||||
pub fn root_dir() -> String {
|
||||
// https://doc.rust-lang.org/cargo/reference/environment-variables.html
|
||||
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or("./".to_owned());
|
||||
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| "./".to_owned());
|
||||
let mut path_buf = fs::canonicalize(&PathBuf::from(&manifest_dir)).unwrap();
|
||||
path_buf.pop(); // rust-lib
|
||||
path_buf.push("flowy-test");
|
||||
@ -36,9 +36,9 @@ pub fn login_email() -> String { "annie2@appflowy.io".to_string() }
|
||||
|
||||
pub fn login_password() -> String { "HelloWorld!123".to_string() }
|
||||
|
||||
const DEFAULT_WORKSPACE_NAME: &'static str = "My workspace";
|
||||
const DEFAULT_WORKSPACE_DESC: &'static str = "This is your first workspace";
|
||||
const DEFAULT_WORKSPACE: &'static str = "Default_Workspace";
|
||||
const DEFAULT_WORKSPACE_NAME: &str = "My workspace";
|
||||
const DEFAULT_WORKSPACE_DESC: &str = "This is your first workspace";
|
||||
const DEFAULT_WORKSPACE: &str = "Default_Workspace";
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn create_default_workspace_if_need(dispatch: Arc<EventDispatch>, user_id: &str) -> Result<(), UserError> {
|
||||
@ -62,13 +62,13 @@ pub(crate) fn create_default_workspace_if_need(dispatch: Arc<EventDispatch>, use
|
||||
|
||||
let workspace = result.map_err(|e| UserError::internal().context(e))?;
|
||||
let query: Bytes = QueryWorkspaceRequest {
|
||||
workspace_id: Some(workspace.id.clone()),
|
||||
workspace_id: Some(workspace.id),
|
||||
}
|
||||
.into_bytes()
|
||||
.unwrap();
|
||||
|
||||
let request = ModuleRequest::new(OpenWorkspace).payload(query);
|
||||
let _result = EventDispatch::sync_send(dispatch.clone(), request)
|
||||
let _result = EventDispatch::sync_send(dispatch, request)
|
||||
.parse::<Workspace, WorkspaceError>()
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
@ -92,7 +92,7 @@ pub fn sign_up(dispatch: Arc<EventDispatch>) -> SignUpContext {
|
||||
.unwrap();
|
||||
|
||||
let request = ModuleRequest::new(SignUp).payload(payload);
|
||||
let user_profile = EventDispatch::sync_send(dispatch.clone(), request)
|
||||
let user_profile = EventDispatch::sync_send(dispatch, request)
|
||||
.parse::<UserProfile, UserError>()
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
@ -132,12 +132,10 @@ fn sign_in(dispatch: Arc<EventDispatch>) -> UserProfile {
|
||||
.unwrap();
|
||||
|
||||
let request = ModuleRequest::new(SignIn).payload(payload);
|
||||
let user_profile = EventDispatch::sync_send(dispatch, request)
|
||||
EventDispatch::sync_send(dispatch, request)
|
||||
.parse::<UserProfile, UserError>()
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
user_profile
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
|
@ -39,7 +39,7 @@ impl FlowyTest {
|
||||
}
|
||||
|
||||
pub fn setup_with(server_config: ServerConfig) -> Self {
|
||||
let config = FlowySDKConfig::new(&root_dir(), server_config, &uuid().to_string()).log_filter("debug");
|
||||
let config = FlowySDKConfig::new(&root_dir(), server_config, &uuid()).log_filter("debug");
|
||||
let sdk = FlowySDK::new(config);
|
||||
Self { sdk }
|
||||
}
|
||||
|
@ -133,7 +133,7 @@ async fn open_workspace(sdk: &FlowyTestSDK, workspace_id: &str) {
|
||||
}
|
||||
|
||||
pub async fn read_workspace(sdk: &FlowyTestSDK, request: QueryWorkspaceRequest) -> Vec<Workspace> {
|
||||
let mut repeated_workspace = FlowyWorkspaceTest::new(sdk.clone())
|
||||
let repeated_workspace = FlowyWorkspaceTest::new(sdk.clone())
|
||||
.event(ReadWorkspaces)
|
||||
.request(request.clone())
|
||||
.async_send()
|
||||
|
@ -2,7 +2,7 @@ use flowy_derive::ProtoBuf_Enum;
|
||||
|
||||
use dart_notify::DartNotifyBuilder;
|
||||
|
||||
const OBSERVABLE_CATEGORY: &'static str = "User";
|
||||
const OBSERVABLE_CATEGORY: &str = "User";
|
||||
|
||||
#[derive(ProtoBuf_Enum, Debug)]
|
||||
pub(crate) enum UserNotification {
|
||||
@ -16,8 +16,8 @@ impl std::default::Default for UserNotification {
|
||||
fn default() -> Self { UserNotification::Unknown }
|
||||
}
|
||||
|
||||
impl std::convert::Into<i32> for UserNotification {
|
||||
fn into(self) -> i32 { self as i32 }
|
||||
impl std::convert::From<UserNotification> for i32 {
|
||||
fn from(notification: UserNotification) -> Self { notification as i32 }
|
||||
}
|
||||
|
||||
pub(crate) fn dart_notify(id: &str, ty: UserNotification) -> DartNotifyBuilder {
|
||||
|
@ -5,8 +5,12 @@ pub struct UserSessionBuilder {
|
||||
config: Option<UserSessionConfig>,
|
||||
}
|
||||
|
||||
impl std::default::Default for UserSessionBuilder {
|
||||
fn default() -> Self { Self { config: None } }
|
||||
}
|
||||
|
||||
impl UserSessionBuilder {
|
||||
pub fn new() -> Self { Self { config: None } }
|
||||
pub fn new() -> Self { UserSessionBuilder::default() }
|
||||
|
||||
pub fn root_dir(mut self, dir: &str, server_config: &ServerConfig, session_cache_key: &str) -> Self {
|
||||
self.config = Some(UserSessionConfig::new(dir, server_config, session_cache_key));
|
||||
|
@ -33,7 +33,7 @@ impl UserDB {
|
||||
})?;
|
||||
|
||||
match DB_MAP.try_write_for(Duration::from_millis(300)) {
|
||||
None => Err(UserError::internal().context(format!("Acquire write lock to save user db failed"))),
|
||||
None => Err(UserError::internal().context("Acquire write lock to save user db failed")),
|
||||
Some(mut write_guard) => {
|
||||
write_guard.insert(user_id.to_owned(), db);
|
||||
Ok(())
|
||||
@ -43,7 +43,7 @@ impl UserDB {
|
||||
|
||||
pub(crate) fn close_user_db(&self, user_id: &str) -> Result<(), UserError> {
|
||||
match DB_MAP.try_write_for(Duration::from_millis(300)) {
|
||||
None => Err(UserError::internal().context(format!("Acquire write lock to close user db failed"))),
|
||||
None => Err(UserError::internal().context("Acquire write lock to close user db failed")),
|
||||
Some(mut write_guard) => {
|
||||
set_user_db_init(false, user_id);
|
||||
write_guard.remove(user_id);
|
||||
@ -71,7 +71,7 @@ impl UserDB {
|
||||
}
|
||||
|
||||
match DB_MAP.try_read_for(Duration::from_millis(300)) {
|
||||
None => Err(UserError::internal().context(format!("Acquire read lock to read user db failed"))),
|
||||
None => Err(UserError::internal().context("Acquire read lock to read user db failed")),
|
||||
Some(read_guard) => match read_guard.get(user_id) {
|
||||
None => Err(UserError::internal().context("Get connection failed. The database is not initialization")),
|
||||
Some(database) => Ok(database.get_pool()),
|
||||
@ -94,7 +94,7 @@ fn set_user_db_init(is_init: bool, user_id: &str) {
|
||||
fn is_user_db_init(user_id: &str) -> bool {
|
||||
match INIT_RECORD.lock().get(user_id) {
|
||||
None => false,
|
||||
Some(flag) => flag.clone(),
|
||||
Some(flag) => *flag,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -74,23 +74,19 @@ impl UserSession {
|
||||
let server = construct_user_server(&config.server_config);
|
||||
let ws_controller = Arc::new(WsController::new());
|
||||
let (status_notifier, _) = broadcast::channel(10);
|
||||
let user_session = Self {
|
||||
Self {
|
||||
database: db,
|
||||
config,
|
||||
server,
|
||||
session: RwLock::new(None),
|
||||
ws_controller,
|
||||
status_notifier,
|
||||
};
|
||||
user_session
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(&self) {
|
||||
match self.get_session() {
|
||||
Ok(session) => {
|
||||
let _ = self.status_notifier.send(UserStatus::Login { token: session.token });
|
||||
},
|
||||
Err(_) => {},
|
||||
if let Ok(session) = self.get_session() {
|
||||
let _ = self.status_notifier.send(UserStatus::Login { token: session.token });
|
||||
}
|
||||
}
|
||||
|
||||
@ -397,10 +393,9 @@ impl std::convert::From<String> for Session {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::Into<String> for Session {
|
||||
fn into(self) -> String {
|
||||
match serde_json::to_string(&self) {
|
||||
impl std::convert::From<Session> for String {
|
||||
fn from(session: Session) -> Self {
|
||||
match serde_json::to_string(&session) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
log::error!("Serialize session to string failed: {:?}", e);
|
||||
|
@ -37,13 +37,13 @@ impl std::convert::From<SignInResponse> for UserTable {
|
||||
fn from(resp: SignInResponse) -> Self { UserTable::new(resp.user_id, resp.name, resp.email, resp.token) }
|
||||
}
|
||||
|
||||
impl std::convert::Into<UserProfile> for UserTable {
|
||||
fn into(self) -> UserProfile {
|
||||
impl std::convert::From<UserTable> for UserProfile {
|
||||
fn from(table: UserTable) -> Self {
|
||||
UserProfile {
|
||||
id: self.id,
|
||||
email: self.email,
|
||||
name: self.name,
|
||||
token: self.token,
|
||||
id: table.id,
|
||||
email: table.email,
|
||||
name: table.name,
|
||||
token: table.token,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -114,6 +114,6 @@ pub(crate) async fn export_handler(
|
||||
controller: Unit<Arc<ViewController>>,
|
||||
) -> DataResult<ExportData, WorkspaceError> {
|
||||
let params: ExportParams = data.into_inner().try_into()?;
|
||||
let data = controller.export_doc(params.into()).await?;
|
||||
let data = controller.export_doc(params).await?;
|
||||
data_result(data)
|
||||
}
|
||||
|
@ -53,15 +53,14 @@ pub fn mk_workspace(
|
||||
server.clone(),
|
||||
));
|
||||
|
||||
let workspace_controller = Arc::new(WorkspaceController::new(
|
||||
user.clone(),
|
||||
database.clone(),
|
||||
app_controller.clone(),
|
||||
view_controller.clone(),
|
||||
trash_can.clone(),
|
||||
server.clone(),
|
||||
));
|
||||
workspace_controller
|
||||
Arc::new(WorkspaceController::new(
|
||||
user,
|
||||
database,
|
||||
app_controller,
|
||||
view_controller,
|
||||
trash_can,
|
||||
server,
|
||||
))
|
||||
}
|
||||
|
||||
pub fn create(workspace: Arc<WorkspaceController>) -> Module {
|
||||
|
@ -1,6 +1,6 @@
|
||||
use dart_notify::DartNotifyBuilder;
|
||||
use flowy_derive::ProtoBuf_Enum;
|
||||
const OBSERVABLE_CATEGORY: &'static str = "Workspace";
|
||||
const OBSERVABLE_CATEGORY: &str = "Workspace";
|
||||
|
||||
// Opti: Using the Rust macro to generate the serde code automatically that can
|
||||
// be use directly in flutter
|
||||
@ -25,8 +25,8 @@ impl std::default::Default for WorkspaceNotification {
|
||||
fn default() -> Self { WorkspaceNotification::Unknown }
|
||||
}
|
||||
|
||||
impl std::convert::Into<i32> for WorkspaceNotification {
|
||||
fn into(self) -> i32 { self as i32 }
|
||||
impl std::convert::From<WorkspaceNotification> for i32 {
|
||||
fn from(notification: WorkspaceNotification) -> Self { notification as i32 }
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug")]
|
||||
|
@ -61,7 +61,7 @@ impl AppController {
|
||||
}
|
||||
|
||||
pub(crate) fn save_app(&self, app: App, conn: &SqliteConnection) -> Result<(), WorkspaceError> {
|
||||
let app_table = AppTable::new(app.clone());
|
||||
let app_table = AppTable::new(app);
|
||||
let _ = AppTableSql::create_app(app_table, &*conn)?;
|
||||
Ok(())
|
||||
}
|
||||
@ -176,9 +176,8 @@ impl AppController {
|
||||
Err(_e) => None,
|
||||
}
|
||||
}));
|
||||
match stream.next().await {
|
||||
Some(event) => handle_trash_event(database.clone(), trash_can.clone(), event).await,
|
||||
None => {},
|
||||
if let Some(event) = stream.next().await {
|
||||
handle_trash_event(database.clone(), trash_can.clone(), event).await
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -25,16 +25,11 @@ impl WorkspaceServerAPI for WorkspaceServer {
|
||||
fn init(&self) {
|
||||
let mut rx = BACKEND_API_MIDDLEWARE.invalid_token_subscribe();
|
||||
tokio::spawn(async move {
|
||||
loop {
|
||||
match rx.recv().await {
|
||||
Ok(invalid_token) => {
|
||||
let error = WorkspaceError::new(ErrorCode::UserUnauthorized, "");
|
||||
send_dart_notification(&invalid_token, WorkspaceNotification::UserUnauthorized)
|
||||
.error(error)
|
||||
.send()
|
||||
},
|
||||
Err(_) => {},
|
||||
}
|
||||
while let Ok(invalid_token) = rx.recv().await {
|
||||
let error = WorkspaceError::new(ErrorCode::UserUnauthorized, "");
|
||||
send_dart_notification(&invalid_token, WorkspaceNotification::UserUnauthorized)
|
||||
.error(error)
|
||||
.send()
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -283,11 +283,9 @@ impl ViewController {
|
||||
Err(_e) => None,
|
||||
}
|
||||
}));
|
||||
match stream.next().await {
|
||||
Some(event) => {
|
||||
handle_trash_event(database.clone(), document.clone(), trash_can.clone(), event).await
|
||||
},
|
||||
None => {},
|
||||
|
||||
if let Some(event) = stream.next().await {
|
||||
handle_trash_event(database.clone(), document.clone(), trash_can.clone(), event).await
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -48,9 +48,9 @@ impl WorkspaceController {
|
||||
Self {
|
||||
user,
|
||||
workspace_sql,
|
||||
view_controller,
|
||||
database,
|
||||
app_controller,
|
||||
view_controller,
|
||||
trash_can,
|
||||
server,
|
||||
}
|
||||
@ -211,7 +211,7 @@ impl WorkspaceController {
|
||||
pub(crate) async fn open_workspace(&self, params: WorkspaceIdentifier) -> Result<Workspace, WorkspaceError> {
|
||||
let user_id = self.user.user_id()?;
|
||||
let conn = self.database.db_connection()?;
|
||||
if let Some(workspace_id) = params.workspace_id.clone() {
|
||||
if let Some(workspace_id) = params.workspace_id {
|
||||
let workspace = self.read_local_workspace(workspace_id, &user_id, &*conn)?;
|
||||
set_current_workspace(&workspace.id);
|
||||
Ok(workspace)
|
||||
@ -227,7 +227,7 @@ impl WorkspaceController {
|
||||
let user_id = self.user.user_id()?;
|
||||
let workspaces =
|
||||
self.read_local_workspaces(params.workspace_id.clone(), &user_id, &*self.database.db_connection()?)?;
|
||||
let _ = self.read_workspaces_on_server(user_id.clone(), params.clone());
|
||||
let _ = self.read_workspaces_on_server(user_id, params);
|
||||
Ok(workspaces)
|
||||
}
|
||||
|
||||
@ -239,13 +239,9 @@ impl WorkspaceController {
|
||||
};
|
||||
let workspace = self.read_local_workspace(workspace_id, &user_id, &*self.database.db_connection()?)?;
|
||||
|
||||
let mut latest_view: Option<View> = None;
|
||||
match self.view_controller.latest_visit_view() {
|
||||
Ok(view) => latest_view = view,
|
||||
Err(_) => {},
|
||||
}
|
||||
let latest_view: Option<View> = self.view_controller.latest_visit_view().unwrap_or(None);
|
||||
let setting = CurrentWorkspaceSetting { workspace, latest_view };
|
||||
let _ = self.read_workspaces_on_server(user_id.clone(), params)?;
|
||||
let _ = self.read_workspaces_on_server(user_id, params)?;
|
||||
Ok(setting)
|
||||
}
|
||||
|
||||
@ -361,14 +357,14 @@ impl WorkspaceController {
|
||||
let _ = (&*conn).immediate_transaction::<_, WorkspaceError, _>(|| {
|
||||
tracing::debug!("Save {} workspace", workspaces.len());
|
||||
for workspace in &workspaces.items {
|
||||
let mut m_workspace = workspace.clone();
|
||||
let apps = m_workspace.apps.into_inner();
|
||||
let m_workspace = workspace.clone();
|
||||
let apps = m_workspace.apps.clone().into_inner();
|
||||
let workspace_table = WorkspaceTable::new(m_workspace, &user_id);
|
||||
|
||||
let _ = workspace_sql.create_workspace(workspace_table, &*conn)?;
|
||||
tracing::debug!("Save {} apps", apps.len());
|
||||
for mut app in apps {
|
||||
let views = app.belongings.into_inner();
|
||||
for app in apps {
|
||||
let views = app.belongings.clone().into_inner();
|
||||
match app_ctrl.save_app(app, &*conn) {
|
||||
Ok(_) => {},
|
||||
Err(e) => log::error!("create app failed: {:?}", e),
|
||||
|
@ -47,13 +47,13 @@ impl AppTable {
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::Into<Trash> for AppTable {
|
||||
fn into(self) -> Trash {
|
||||
impl std::convert::From<AppTable> for Trash {
|
||||
fn from(table: AppTable) -> Self {
|
||||
Trash {
|
||||
id: self.id,
|
||||
name: self.name,
|
||||
modified_time: self.modified_time,
|
||||
create_time: self.create_time,
|
||||
id: table.id,
|
||||
name: table.name,
|
||||
modified_time: table.modified_time,
|
||||
create_time: table.create_time,
|
||||
ty: TrashType::App,
|
||||
}
|
||||
}
|
||||
@ -117,18 +117,17 @@ impl AppTableChangeset {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::Into<App> for AppTable {
|
||||
fn into(self) -> App {
|
||||
impl std::convert::From<AppTable> for App {
|
||||
fn from(table: AppTable) -> Self {
|
||||
App {
|
||||
id: self.id,
|
||||
workspace_id: self.workspace_id,
|
||||
name: self.name,
|
||||
desc: self.desc,
|
||||
id: table.id,
|
||||
workspace_id: table.workspace_id,
|
||||
name: table.name,
|
||||
desc: table.desc,
|
||||
belongings: RepeatedView::default(),
|
||||
version: self.version,
|
||||
modified_time: self.modified_time,
|
||||
create_time: self.create_time,
|
||||
version: table.version,
|
||||
modified_time: table.modified_time,
|
||||
create_time: table.create_time,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -12,14 +12,14 @@ pub(crate) struct TrashTable {
|
||||
pub create_time: i64,
|
||||
pub ty: SqlTrashType,
|
||||
}
|
||||
impl std::convert::Into<Trash> for TrashTable {
|
||||
fn into(self) -> Trash {
|
||||
impl std::convert::From<TrashTable> for Trash {
|
||||
fn from(table: TrashTable) -> Self {
|
||||
Trash {
|
||||
id: self.id,
|
||||
name: self.name,
|
||||
modified_time: self.modified_time,
|
||||
create_time: self.create_time,
|
||||
ty: self.ty.into(),
|
||||
id: table.id,
|
||||
name: table.name,
|
||||
modified_time: table.modified_time,
|
||||
create_time: table.create_time,
|
||||
ty: table.ty.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -77,9 +77,9 @@ impl std::convert::From<i32> for SqlTrashType {
|
||||
|
||||
impl_sql_integer_expression!(SqlTrashType);
|
||||
|
||||
impl std::convert::Into<TrashType> for SqlTrashType {
|
||||
fn into(self) -> TrashType {
|
||||
match self {
|
||||
impl std::convert::From<SqlTrashType> for TrashType {
|
||||
fn from(ty: SqlTrashType) -> Self {
|
||||
match ty {
|
||||
SqlTrashType::Unknown => TrashType::Unknown,
|
||||
SqlTrashType::View => TrashType::View,
|
||||
SqlTrashType::App => TrashType::App,
|
||||
|
@ -50,33 +50,33 @@ impl ViewTable {
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::Into<View> for ViewTable {
|
||||
fn into(self) -> View {
|
||||
let view_type = match self.view_type {
|
||||
impl std::convert::From<ViewTable> for View {
|
||||
fn from(table: ViewTable) -> Self {
|
||||
let view_type = match table.view_type {
|
||||
ViewTableType::Docs => ViewType::Doc,
|
||||
};
|
||||
|
||||
View {
|
||||
id: self.id,
|
||||
belong_to_id: self.belong_to_id,
|
||||
name: self.name,
|
||||
desc: self.desc,
|
||||
id: table.id,
|
||||
belong_to_id: table.belong_to_id,
|
||||
name: table.name,
|
||||
desc: table.desc,
|
||||
view_type,
|
||||
belongings: RepeatedView::default(),
|
||||
modified_time: self.modified_time,
|
||||
version: self.version,
|
||||
create_time: self.create_time,
|
||||
modified_time: table.modified_time,
|
||||
version: table.version,
|
||||
create_time: table.create_time,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::Into<Trash> for ViewTable {
|
||||
fn into(self) -> Trash {
|
||||
impl std::convert::From<ViewTable> for Trash {
|
||||
fn from(table: ViewTable) -> Self {
|
||||
Trash {
|
||||
id: self.id,
|
||||
name: self.name,
|
||||
modified_time: self.modified_time,
|
||||
create_time: self.create_time,
|
||||
id: table.id,
|
||||
name: table.name,
|
||||
modified_time: table.modified_time,
|
||||
create_time: table.create_time,
|
||||
ty: TrashType::View,
|
||||
}
|
||||
}
|
||||
|
@ -40,7 +40,7 @@ impl WorkspaceTableSql {
|
||||
.into_boxed();
|
||||
|
||||
if let Some(workspace_id) = workspace_id {
|
||||
filter = filter.filter(workspace_table::id.eq(workspace_id.to_owned()));
|
||||
filter = filter.filter(workspace_table::id.eq(workspace_id));
|
||||
};
|
||||
|
||||
let workspaces = filter.load::<WorkspaceTable>(conn)?;
|
||||
|
@ -31,15 +31,15 @@ impl WorkspaceTable {
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::Into<Workspace> for WorkspaceTable {
|
||||
fn into(self) -> Workspace {
|
||||
impl std::convert::From<WorkspaceTable> for Workspace {
|
||||
fn from(table: WorkspaceTable) -> Self {
|
||||
Workspace {
|
||||
id: self.id,
|
||||
name: self.name,
|
||||
desc: self.desc,
|
||||
id: table.id,
|
||||
name: table.name,
|
||||
desc: table.desc,
|
||||
apps: RepeatedApp::default(),
|
||||
modified_time: self.modified_time,
|
||||
create_time: self.create_time,
|
||||
modified_time: table.modified_time,
|
||||
create_time: table.create_time,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::type_complexity)]
|
||||
use crate::{module::WorkspaceUser, services::server::Server};
|
||||
use lib_infra::retry::Action;
|
||||
use pin_project::pin_project;
|
||||
@ -27,7 +28,7 @@ impl<Fut, T, E> RetryAction<Fut, T, E> {
|
||||
Fut: Future<Output = Result<T, E>> + Send + Sync + 'static,
|
||||
F: Fn(String, Server) -> Fut + Send + Sync + 'static,
|
||||
{
|
||||
let token = user.token().unwrap_or("".to_owned());
|
||||
let token = user.token().unwrap_or_else(|_| "".to_owned());
|
||||
Self {
|
||||
token,
|
||||
server,
|
||||
|
@ -53,7 +53,7 @@ where
|
||||
T: std::convert::TryFrom<Bytes, Error = protobuf::ProtobufError>,
|
||||
{
|
||||
fn parse_from_bytes(bytes: Bytes) -> Result<Self, DispatchError> {
|
||||
let data = T::try_from(bytes.clone())?;
|
||||
let data = T::try_from(bytes)?;
|
||||
Ok(data)
|
||||
}
|
||||
}
|
||||
|
@ -76,7 +76,7 @@ where
|
||||
T: FromBytes,
|
||||
{
|
||||
match payload {
|
||||
Payload::None => Err(InternalError::UnexpectedNone(format!("Parse fail, expected payload")).into()),
|
||||
Payload::None => Err(InternalError::UnexpectedNone("Parse fail, expected payload".to_string()).into()),
|
||||
Payload::Bytes(bytes) => {
|
||||
let data = T::parse_from_bytes(bytes.clone())?;
|
||||
Ok(Data(data))
|
||||
|
@ -26,8 +26,7 @@ impl EventDispatch {
|
||||
tracing::trace!("{}", module_info(&modules));
|
||||
let module_map = as_module_map(modules);
|
||||
|
||||
let dispatch = EventDispatch { module_map, runtime };
|
||||
dispatch
|
||||
EventDispatch { module_map, runtime }
|
||||
}
|
||||
|
||||
pub fn async_send<Req>(dispatch: Arc<EventDispatch>, request: Req) -> DispatchFuture<EventResponse>
|
||||
@ -99,9 +98,7 @@ where
|
||||
|
||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.as_mut().project();
|
||||
loop {
|
||||
return Poll::Ready(futures_core::ready!(this.fut.poll(cx)));
|
||||
}
|
||||
Poll::Ready(futures_core::ready!(this.fut.poll(cx)))
|
||||
}
|
||||
}
|
||||
|
||||
@ -168,7 +165,7 @@ impl Service<DispatchContext> for DispatchService {
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn module_info(modules: &Vec<Module>) -> String {
|
||||
fn module_info(modules: &[Module]) -> String {
|
||||
let mut info = format!("{} modules loaded\n", modules.len());
|
||||
for module in modules {
|
||||
info.push_str(&format!("-> {} loaded \n", module.name));
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
mod errors;
|
||||
|
||||
pub use errors::*;
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
pub use container::*;
|
||||
pub use data::*;
|
||||
pub use module::*;
|
||||
|
@ -59,14 +59,18 @@ pub struct Module {
|
||||
service_map: Arc<HashMap<Event, EventServiceFactory>>,
|
||||
}
|
||||
|
||||
impl Module {
|
||||
pub fn new() -> Self {
|
||||
impl std::default::Default for Module {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
name: "".to_owned(),
|
||||
module_data: Arc::new(ModuleDataMap::new()),
|
||||
service_map: Arc::new(HashMap::new()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Module {
|
||||
pub fn new() -> Self { Module::default() }
|
||||
|
||||
pub fn name(mut self, s: &str) -> Self {
|
||||
self.name = s.to_owned();
|
||||
@ -99,7 +103,7 @@ impl Module {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn events(&self) -> Vec<Event> { self.service_map.keys().map(|key| key.clone()).collect::<Vec<_>>() }
|
||||
pub fn events(&self) -> Vec<Event> { self.service_map.keys().cloned().collect::<Vec<_>>() }
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@ -168,7 +172,7 @@ impl Service<ModuleRequest> for ModuleService {
|
||||
fn call(&self, request: ModuleRequest) -> Self::Future {
|
||||
let ModuleRequest { id, event, payload } = request;
|
||||
let module_data = self.module_data.clone();
|
||||
let request = EventRequest::new(id.clone(), event, module_data);
|
||||
let request = EventRequest::new(id, event, module_data);
|
||||
|
||||
match self.service_map.get(&request.event) {
|
||||
Some(factory) => {
|
||||
@ -200,10 +204,8 @@ impl Future for ModuleServiceFuture {
|
||||
type Output = Result<EventResponse, DispatchError>;
|
||||
|
||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
loop {
|
||||
let (_, response) = ready!(self.as_mut().project().fut.poll(cx))?.into_parts();
|
||||
return Poll::Ready(Ok(response));
|
||||
}
|
||||
let (_, response) = ready!(self.as_mut().project().fut.poll(cx))?.into_parts();
|
||||
Poll::Ready(Ok(response))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
pub mod payload;
|
||||
mod request;
|
||||
|
||||
|
@ -25,26 +25,25 @@ fn format_payload_print(payload: &Payload, f: &mut Formatter<'_>) -> fmt::Result
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::Into<Payload> for String {
|
||||
fn into(self) -> Payload { Payload::Bytes(Bytes::from(self)) }
|
||||
impl std::convert::From<String> for Payload {
|
||||
fn from(s: String) -> Self { Payload::Bytes(Bytes::from(s)) }
|
||||
}
|
||||
|
||||
impl std::convert::Into<Payload> for &'_ String {
|
||||
fn into(self) -> Payload { Payload::Bytes(Bytes::from(self.to_owned())) }
|
||||
impl std::convert::From<&'_ String> for Payload {
|
||||
fn from(s: &String) -> Self { Payload::Bytes(Bytes::from(s.to_owned())) }
|
||||
}
|
||||
|
||||
impl std::convert::Into<Payload> for Bytes {
|
||||
fn into(self) -> Payload { Payload::Bytes(self) }
|
||||
impl std::convert::From<Bytes> for Payload {
|
||||
fn from(bytes: Bytes) -> Self { Payload::Bytes(bytes) }
|
||||
}
|
||||
|
||||
impl std::convert::Into<Payload> for () {
|
||||
fn into(self) -> Payload { Payload::None }
|
||||
impl std::convert::From<()> for Payload {
|
||||
fn from(_: ()) -> Self { Payload::None }
|
||||
}
|
||||
impl std::convert::From<Vec<u8>> for Payload {
|
||||
fn from(bytes: Vec<u8>) -> Self { Payload::Bytes(Bytes::from(bytes)) }
|
||||
}
|
||||
|
||||
impl std::convert::Into<Payload> for Vec<u8> {
|
||||
fn into(self) -> Payload { Payload::Bytes(Bytes::from(self)) }
|
||||
}
|
||||
|
||||
impl std::convert::Into<Payload> for &str {
|
||||
fn into(self) -> Payload { self.to_string().into() }
|
||||
impl std::convert::From<&str> for Payload {
|
||||
fn from(s: &str) -> Self { s.to_string().into() }
|
||||
}
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
pub use builder::*;
|
||||
pub use responder::*;
|
||||
pub use response::*;
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
mod boxed;
|
||||
mod handler;
|
||||
mod service;
|
||||
|
@ -40,12 +40,11 @@ impl FlowySystem {
|
||||
});
|
||||
|
||||
let module_map = as_module_map(module_factory());
|
||||
sender_factory(module_map.clone(), &runtime);
|
||||
sender_factory(module_map, &runtime);
|
||||
|
||||
let system = Self { sys_cmd_tx };
|
||||
FlowySystem::set_current(system);
|
||||
let runner = SystemRunner { rt: runtime, stop_rx };
|
||||
runner
|
||||
SystemRunner { rt: runtime, stop_rx }
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
|
@ -1,5 +1,5 @@
|
||||
use std::{io, thread};
|
||||
use thread_id;
|
||||
|
||||
use tokio::runtime;
|
||||
|
||||
pub mod ready;
|
||||
|
@ -28,9 +28,7 @@ where
|
||||
|
||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.as_mut().project();
|
||||
loop {
|
||||
return Poll::Ready(ready!(this.fut.poll(cx)));
|
||||
}
|
||||
Poll::Ready(ready!(this.fut.poll(cx)))
|
||||
}
|
||||
}
|
||||
|
||||
@ -60,9 +58,7 @@ where
|
||||
|
||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.as_mut().project();
|
||||
loop {
|
||||
let result = ready!(this.fut.poll(cx));
|
||||
return Poll::Ready(result);
|
||||
}
|
||||
let result = ready!(this.fut.poll(cx));
|
||||
Poll::Ready(result)
|
||||
}
|
||||
}
|
||||
|
@ -1,3 +1,5 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
|
||||
mod kv;
|
||||
mod schema;
|
||||
|
||||
|
@ -1,3 +1,5 @@
|
||||
#![allow(clippy::large_enum_variant)]
|
||||
#![allow(clippy::type_complexity)]
|
||||
use crate::retry::FixedInterval;
|
||||
use pin_project::pin_project;
|
||||
use std::{
|
||||
|
@ -33,7 +33,7 @@ pub trait PragmaExtension: ConnectionExtension {
|
||||
None => format!("PRAGMA {} = '{}'", key, val),
|
||||
};
|
||||
log::trace!("SQLITE {}", query);
|
||||
Ok(self.query::<ST, T>(&query)?)
|
||||
self.query::<ST, T>(&query)
|
||||
}
|
||||
|
||||
fn pragma_get<ST, T>(&self, key: &str, schema: Option<&str>) -> Result<T>
|
||||
@ -45,7 +45,7 @@ pub trait PragmaExtension: ConnectionExtension {
|
||||
None => format!("PRAGMA {}", key),
|
||||
};
|
||||
log::trace!("SQLITE {}", query);
|
||||
Ok(self.query::<ST, T>(&query)?)
|
||||
self.query::<ST, T>(&query)
|
||||
}
|
||||
|
||||
fn pragma_set_busy_timeout(&self, timeout_ms: i32) -> Result<i32> {
|
||||
@ -59,7 +59,7 @@ pub trait PragmaExtension: ConnectionExtension {
|
||||
}
|
||||
|
||||
fn pragma_get_journal_mode(&self, schema: Option<&str>) -> Result<SQLiteJournalMode> {
|
||||
Ok(self.pragma_get::<Text, String>("journal_mode", schema)?.parse()?)
|
||||
self.pragma_get::<Text, String>("journal_mode", schema)?.parse()
|
||||
}
|
||||
|
||||
fn pragma_set_synchronous(&self, synchronous: SQLiteSynchronous, schema: Option<&str>) -> Result<()> {
|
||||
@ -67,7 +67,7 @@ pub trait PragmaExtension: ConnectionExtension {
|
||||
}
|
||||
|
||||
fn pragma_get_synchronous(&self, schema: Option<&str>) -> Result<SQLiteSynchronous> {
|
||||
Ok(self.pragma_get::<Integer, i32>("synchronous", schema)?.try_into()?)
|
||||
self.pragma_get::<Integer, i32>("synchronous", schema)?.try_into()
|
||||
}
|
||||
}
|
||||
impl PragmaExtension for SqliteConnection {}
|
||||
|
@ -25,9 +25,8 @@ impl DartEventCodeGen {
|
||||
for (index, render_ctx) in event_render_ctx.into_iter().enumerate() {
|
||||
let mut event_template = EventTemplate::new();
|
||||
|
||||
match event_template.render(render_ctx, index) {
|
||||
Some(content) => render_result.push_str(content.as_ref()),
|
||||
None => {}
|
||||
if let Some(content) = event_template.render(render_ctx, index) {
|
||||
render_result.push_str(content.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
@ -89,7 +88,7 @@ pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
|
||||
ctxt.check().unwrap();
|
||||
attrs
|
||||
.iter()
|
||||
.filter(|attr| attr.attrs.event_attrs.ignore == false)
|
||||
.filter(|attr| !attr.attrs.event_attrs.ignore)
|
||||
.enumerate()
|
||||
.map(|(_index, attr)| EventASTContext::from(&attr.attrs))
|
||||
.collect::<Vec<_>>()
|
||||
@ -103,30 +102,30 @@ pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
|
||||
.collect::<Vec<EventASTContext>>()
|
||||
}
|
||||
|
||||
pub fn ast_to_event_render_ctx(ast: &Vec<EventASTContext>) -> Vec<EventRenderContext> {
|
||||
pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContext> {
|
||||
ast.iter()
|
||||
.map(|event_ast| {
|
||||
let input_deserializer = match event_ast.event_input {
|
||||
Some(ref event_input) => Some(event_input.get_ident().unwrap().to_string()),
|
||||
None => None,
|
||||
};
|
||||
let input_deserializer = event_ast
|
||||
.event_input
|
||||
.as_ref()
|
||||
.map(|event_input| event_input.get_ident().unwrap().to_string());
|
||||
|
||||
let output_deserializer = match event_ast.event_output {
|
||||
Some(ref event_output) => Some(event_output.get_ident().unwrap().to_string()),
|
||||
None => None,
|
||||
};
|
||||
let output_deserializer = event_ast
|
||||
.event_output
|
||||
.as_ref()
|
||||
.map(|event_output| event_output.get_ident().unwrap().to_string());
|
||||
// eprintln!(
|
||||
// "😁 {:?} / {:?}",
|
||||
// event_ast.event_input, event_ast.event_output
|
||||
// );
|
||||
|
||||
return EventRenderContext {
|
||||
EventRenderContext {
|
||||
input_deserializer,
|
||||
output_deserializer,
|
||||
error_deserializer: event_ast.event_error.clone(),
|
||||
event: event_ast.event.to_string(),
|
||||
event_ty: event_ast.event_ty.to_string(),
|
||||
};
|
||||
}
|
||||
})
|
||||
.collect::<Vec<EventRenderContext>>()
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ pub struct EventTemplate {
|
||||
tera_context: Context,
|
||||
}
|
||||
|
||||
pub const DART_IMPORTED: &'static str = r#"
|
||||
pub const DART_IMPORTED: &str = r#"
|
||||
/// Auto gen code from rust ast, do not edit
|
||||
part of 'dispatch.dart';
|
||||
"#;
|
||||
@ -21,9 +21,9 @@ pub struct EventRenderContext {
|
||||
#[allow(dead_code)]
|
||||
impl EventTemplate {
|
||||
pub fn new() -> Self {
|
||||
return EventTemplate {
|
||||
EventTemplate {
|
||||
tera_context: Context::new(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
|
||||
|
@ -1,3 +1,4 @@
|
||||
#![allow(clippy::module_inception)]
|
||||
mod dart_event;
|
||||
mod event_template;
|
||||
|
||||
|
@ -33,7 +33,7 @@ fn parse_files_protobuf(proto_crate_path: &str, proto_output_dir: &str) -> Vec<P
|
||||
.into_iter()
|
||||
.filter_entry(|e| !is_hidden(e))
|
||||
.filter_map(|e| e.ok())
|
||||
.filter(|e| e.file_type().is_dir() == false)
|
||||
.filter(|e| !e.file_type().is_dir())
|
||||
.map(|e| {
|
||||
let path = e.path().to_str().unwrap().to_string();
|
||||
let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
|
||||
@ -64,7 +64,7 @@ fn parse_files_protobuf(proto_crate_path: &str, proto_output_dir: &str) -> Vec<P
|
||||
|
||||
let s = struct_template.render().unwrap();
|
||||
proto_file_content.push_str(s.as_ref());
|
||||
proto_file_content.push_str("\n");
|
||||
proto_file_content.push('\n');
|
||||
});
|
||||
|
||||
let enums = get_ast_enums(&ast);
|
||||
@ -73,7 +73,7 @@ fn parse_files_protobuf(proto_crate_path: &str, proto_output_dir: &str) -> Vec<P
|
||||
enum_template.set_message_enum(&e);
|
||||
let s = enum_template.render().unwrap();
|
||||
proto_file_content.push_str(s.as_ref());
|
||||
proto_file_content.push_str("\n");
|
||||
proto_file_content.push('\n');
|
||||
});
|
||||
|
||||
if !enums.is_empty() || !structs.is_empty() {
|
||||
@ -95,7 +95,7 @@ pub fn parse_or_init_proto_file(path: &str) -> String {
|
||||
let mut proto_file_content = String::new();
|
||||
let imported_content = find_proto_file_import(path);
|
||||
proto_file_content.push_str(imported_content.as_ref());
|
||||
proto_file_content.push_str("\n");
|
||||
proto_file_content.push('\n');
|
||||
proto_file_content
|
||||
}
|
||||
|
||||
@ -105,8 +105,8 @@ pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
|
||||
// file.write_all(content.as_bytes()).unwrap();
|
||||
let ctxt = Ctxt::new();
|
||||
let mut proto_structs: Vec<Struct> = vec![];
|
||||
ast.items.iter().for_each(|item| match item {
|
||||
Item::Struct(item_struct) => {
|
||||
ast.items.iter().for_each(|item| {
|
||||
if let Item::Struct(item_struct) = item {
|
||||
let (_, fields) = struct_from_ast(&ctxt, &item_struct.fields);
|
||||
|
||||
if fields
|
||||
@ -121,7 +121,6 @@ pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
|
||||
});
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
ctxt.check().unwrap();
|
||||
proto_structs
|
||||
@ -133,20 +132,13 @@ pub fn get_ast_enums(ast: &syn::File) -> Vec<FlowyEnum> {
|
||||
|
||||
ast.items.iter().for_each(|item| {
|
||||
// https://docs.rs/syn/1.0.54/syn/enum.Item.html
|
||||
match item {
|
||||
Item::Enum(item_enum) => {
|
||||
let attrs = flowy_ast::enum_from_ast(
|
||||
&ctxt,
|
||||
&item_enum.ident,
|
||||
&item_enum.variants,
|
||||
&ast.attrs,
|
||||
);
|
||||
flowy_enums.push(FlowyEnum {
|
||||
name: item_enum.ident.to_string(),
|
||||
attrs,
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
if let Item::Enum(item_enum) = item {
|
||||
let attrs =
|
||||
flowy_ast::enum_from_ast(&ctxt, &item_enum.ident, &item_enum.variants, &ast.attrs);
|
||||
flowy_enums.push(FlowyEnum {
|
||||
name: item_enum.ident.to_string(),
|
||||
attrs,
|
||||
});
|
||||
}
|
||||
});
|
||||
ctxt.check().unwrap();
|
||||
@ -182,18 +174,14 @@ fn find_proto_file_import(path: &str) -> String {
|
||||
|
||||
content.lines().for_each(|line| {
|
||||
////Result<Option<Match<'t>>>
|
||||
if let Ok(some_line) = SYNTAX_REGEX.find(line) {
|
||||
if let Some(m) = some_line {
|
||||
result.push_str(m.as_str());
|
||||
result.push_str("\n");
|
||||
}
|
||||
if let Ok(Some(m)) = SYNTAX_REGEX.find(line) {
|
||||
result.push_str(m.as_str());
|
||||
result.push('\n');
|
||||
}
|
||||
|
||||
if let Ok(some_line) = IMPORT_REGEX.find(line) {
|
||||
if let Some(m) = some_line {
|
||||
result.push_str(m.as_str());
|
||||
result.push_str("\n");
|
||||
}
|
||||
if let Ok(Some(m)) = IMPORT_REGEX.find(line) {
|
||||
result.push_str(m.as_str());
|
||||
result.push('\n');
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -27,7 +27,7 @@ impl ProtoGen {
}
}

fn write_proto_files(crate_infos: &Vec<CrateProtoInfo>) {
fn write_proto_files(crate_infos: &[CrateProtoInfo]) {
for crate_info in crate_infos {
let dir = crate_info.inner.proto_file_output_dir();
crate_info.files.iter().for_each(|info| {

@ -41,7 +41,7 @@ fn write_proto_files(crate_infos: &Vec<CrateProtoInfo>) {
}
}

fn write_rust_crate_mod_file(crate_infos: &Vec<CrateProtoInfo>) {
fn write_rust_crate_mod_file(crate_infos: &[CrateProtoInfo]) {
for crate_info in crate_infos {
let mod_path = crate_info.inner.proto_model_mod_file();
match OpenOptions::new()

@ -56,7 +56,7 @@ fn write_rust_crate_mod_file(crate_infos: &Vec<CrateProtoInfo>) {
mod_file_content.push_str("// Auto-generated, do not edit \n");
walk_dir(
crate_info.inner.proto_file_output_dir().as_ref(),
|e| e.file_type().is_dir() == false,
|e| !e.file_type().is_dir(),
|_, name| {
let c = format!("\nmod {}; \npub use {}::*; \n", &name, &name);
mod_file_content.push_str(c.as_ref());

@ -72,7 +72,7 @@ fn write_rust_crate_mod_file(crate_infos: &Vec<CrateProtoInfo>) {
}

fn write_flutter_protobuf_package_mod_file(
crate_infos: &Vec<CrateProtoInfo>,
crate_infos: &[CrateProtoInfo],
package_info: &FlutterProtobufInfo,
) {
let model_dir = package_info.model_dir();

@ -91,7 +91,7 @@ fn write_flutter_protobuf_package_mod_file(

walk_dir(
crate_info.inner.proto_file_output_dir().as_ref(),
|e| e.file_type().is_dir() == false,
|e| !e.file_type().is_dir(),
|_, name| {
let c = format!("export './{}.pb.dart';\n", &name);
mod_file_content.push_str(c.as_ref());

@ -108,7 +108,7 @@ fn write_flutter_protobuf_package_mod_file(
}
}

fn run_rust_protoc(crate_infos: &Vec<CrateProtoInfo>) {
fn run_rust_protoc(crate_infos: &[CrateProtoInfo]) {
for crate_info in crate_infos {
let rust_out = crate_info.inner.proto_struct_output_dir();
let proto_path = crate_info.inner.proto_file_output_dir();

@ -130,7 +130,7 @@ fn run_rust_protoc(crate_infos: &Vec<CrateProtoInfo>) {
}
}

fn run_flutter_protoc(crate_infos: &Vec<CrateProtoInfo>, package_info: &FlutterProtobufInfo) {
fn run_flutter_protoc(crate_infos: &[CrateProtoInfo], package_info: &FlutterProtobufInfo) {
let model_dir = package_info.model_dir();
if !Path::new(&model_dir).exists() {
std::fs::create_dir_all(&model_dir).unwrap();
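The signatures above move from `&Vec<CrateProtoInfo>` to `&[CrateProtoInfo]`, which is the fix clippy's `ptr_arg` lint suggests: a slice parameter accepts a borrowed `Vec`, an array, or a sub-slice, and the iteration code stays unchanged. A small sketch with invented names:

    // Borrowing a slice instead of &Vec<String> keeps callers flexible.
    fn write_names(names: &[String]) {
        for name in names {
            println!("writing {}", name);
        }
    }

    fn main() {
        let owned = vec!["doc".to_string(), "workspace".to_string()];
        write_names(&owned);      // &Vec<String> coerces to &[String]
        write_names(&owned[..1]); // a sub-slice works too
    }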
@ -158,11 +158,8 @@ fn run_flutter_protoc(crate_infos: &Vec<CrateProtoInfo>, package_info: &FlutterP
}

fn remove_everything_in_dir(dir: &str) {
if Path::new(dir).exists() {
if std::fs::remove_dir_all(dir).is_err()
{
panic!("Reset protobuf directory failed")
};
if Path::new(dir).exists() && std::fs::remove_dir_all(dir).is_err() {
panic!("Reset protobuf directory failed")
}
std::fs::create_dir_all(dir).unwrap();
}
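Here two nested `if` blocks become one condition joined with `&&`, as clippy's `collapsible_if` lint recommends; short-circuit evaluation preserves the behaviour because the remove call only runs when the directory exists. A sketch using a throwaway directory name rather than the real protobuf path:

    use std::path::Path;

    fn reset_dir(dir: &str) {
        // One condition instead of `if exists { if remove failed { panic } }`.
        if Path::new(dir).exists() && std::fs::remove_dir_all(dir).is_err() {
            panic!("Reset directory failed")
        }
        std::fs::create_dir_all(dir).unwrap();
    }

    fn main() {
        reset_dir("target/tmp_reset_example"); // hypothetical path
    }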
@ -102,7 +102,7 @@ pub fn parse_crate_info_from_path(root: &str) -> Vec<ProtobufCrate> {
.filter_map(|e| e.ok())
.filter(|e| is_crate_dir(e))
.flat_map(|e| parse_crate_config_from(&e))
.map(|crate_config| ProtobufCrate::from_config(crate_config))
.map(ProtobufCrate::from_config)
.collect::<Vec<ProtobufCrate>>()
}
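`.map(|crate_config| ProtobufCrate::from_config(crate_config))` becomes `.map(ProtobufCrate::from_config)`; clippy's `redundant_closure` lint flags closures that only forward their argument to a function. A reduced sketch with local types:

    struct Config(String);
    struct CrateInfo(String);

    impl CrateInfo {
        fn from_config(config: Config) -> Self {
            CrateInfo(config.0)
        }
    }

    fn main() {
        let configs = vec![Config("a".into()), Config("b".into())];
        // The function path is passed directly; no wrapping closure needed.
        let infos: Vec<CrateInfo> = configs.into_iter().map(CrateInfo::from_config).collect();
        println!("{} crates", infos.len());
    }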
@ -15,11 +15,11 @@ pub struct ProtobufDeriveMeta {
impl ProtobufDeriveMeta {
pub fn new(structs: Vec<String>, enums: Vec<String>) -> Self {
let enums: Vec<_> = enums.into_iter().unique().collect();
return ProtobufDeriveMeta {
ProtobufDeriveMeta {
context: Context::new(),
structs,
enums,
};
}
}

pub fn render(&mut self) -> Option<String> {

@ -37,7 +37,7 @@ impl ProtobufDeriveMeta {
}
}

pub fn write_derive_meta(crate_infos: &Vec<CrateProtoInfo>, derive_meta_dir: &str) {
pub fn write_derive_meta(crate_infos: &[CrateProtoInfo], derive_meta_dir: &str) {
let file_proto_infos = crate_infos
.iter()
.map(|ref crate_info| &crate_info.files)

@ -58,7 +58,7 @@ pub fn write_derive_meta(crate_infos: &Vec<CrateProtoInfo>, derive_meta_dir: &st
let mut derive_template = ProtobufDeriveMeta::new(structs, enums);
let new_content = derive_template.render().unwrap();
let old_content = read_file(derive_meta_dir).unwrap();
if new_content.clone() == old_content {
if new_content == old_content {
return;
}
// println!("{}", diff_lines(&old_content, &new_content));
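The constructor drops `return ... ;` and lets the struct literal stand as the function's tail expression, the usual fix for clippy's `needless_return`. A minimal sketch with invented fields:

    struct DeriveMeta {
        structs: Vec<String>,
        enums: Vec<String>,
    }

    impl DeriveMeta {
        fn new(structs: Vec<String>, enums: Vec<String>) -> Self {
            // The last expression is the return value: no `return`, no trailing `;`.
            DeriveMeta { structs, enums }
        }
    }

    fn main() {
        let meta = DeriveMeta::new(vec!["Doc".into()], vec!["RevType".into()]);
        println!("{} structs, {} enums", meta.structs.len(), meta.enums.len());
    }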
@ -1,3 +1,4 @@
#![allow(clippy::module_inception)]
mod derive_meta;

pub use derive_meta::*;
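Several `mod.rs` files in this commit gain `#![allow(clippy::module_inception)]`. The lint fires when a module declares a child module with its own name (for example a `derive_meta` directory containing `derive_meta.rs`); the commit keeps that layout and silences the lint rather than renaming files. An inline approximation of the shape that triggers it (illustrative names only):

    #[allow(clippy::module_inception)]
    mod derive_meta {
        // Same name as the parent module: this is what the lint objects to.
        mod derive_meta {
            pub fn marker() -> &'static str { "inner" }
        }
        pub use self::derive_meta::*;
    }

    fn main() {
        println!("{}", derive_meta::marker());
    }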
@ -10,10 +10,10 @@ pub struct EnumTemplate {
#[allow(dead_code)]
impl EnumTemplate {
pub fn new() -> Self {
return EnumTemplate {
EnumTemplate {
context: Context::new(),
items: vec![],
};
}
}

pub fn set_message_enum(&mut self, flowy_enum: &FlowyEnum) {

@ -24,10 +24,10 @@ pub struct StructTemplate {
#[allow(dead_code)]
impl StructTemplate {
pub fn new() -> Self {
return StructTemplate {
StructTemplate {
context: Context::new(),
fields: vec![],
};
}
}

pub fn set_message_struct_name(&mut self, name: &str) {

@ -46,8 +46,8 @@ impl StructTemplate {
mapped_ty = RUST_TYPE_MAP[ty];
}

match field.bracket_category {
Some(ref category) => match category {
if let Some(ref category) = field.bracket_category {
match category {
BracketCategory::Opt => match &field.bracket_inner_ty {
None => {}
Some(inner_ty) => match inner_ty.to_string().as_str() {

@ -93,8 +93,7 @@ impl StructTemplate {
BracketCategory::Other => self
.fields
.push(format!("{} {} = {};", mapped_ty, name, index)),
},
None => {}
}
}
}
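The `match field.bracket_category { Some(ref category) => ..., None => {} }` block becomes `if let Some(ref category) = ...`, the rewrite clippy suggests (via `single_match`) when the `None` arm is empty. A sketch with an invented enum:

    enum Category {
        Opt,
        Vec,
    }

    fn describe(category: &Option<Category>) {
        // `if let` replaces a two-arm match whose None arm did nothing.
        if let Some(ref category) = *category {
            match category {
                Category::Opt => println!("optional field"),
                Category::Vec => println!("repeated field"),
            }
        }
    }

    fn main() {
        describe(&Some(Category::Opt));
        describe(&Some(Category::Vec));
        describe(&None);
    }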
@ -40,7 +40,7 @@ pub fn save_content_to_file_with_diff_prompt(content: &str, output_file: &str, _
}
};
if new_content != old_content {
print_diff(old_content.clone(), new_content.clone());
print_diff(old_content, new_content.clone());
write_to_file()
// if force_write {
// write_to_file()

@ -98,8 +98,7 @@ pub fn get_tera(directory: &str) -> Tera {
.display()
.to_string();
let mut template_path = format!("{}/**/*.tera", root_absolute_path);
if cfg!(windows)
{
if cfg!(windows) {
// remove "\\?\" prefix on windows
template_path = format!("{}/**/*.tera", &root_absolute_path[4..]);
}

@ -115,7 +114,7 @@ pub fn get_tera(directory: &str) -> Tera {

pub fn is_crate_dir(e: &walkdir::DirEntry) -> bool {
let cargo = e.path().file_stem().unwrap().to_str().unwrap().to_string();
cargo == "Cargo".to_string()
cargo == *"Cargo"
}

pub fn is_proto_file(e: &walkdir::DirEntry) -> bool {

@ -123,14 +122,14 @@ pub fn is_proto_file(e: &walkdir::DirEntry) -> bool {
return false;
}
let ext = e.path().extension().unwrap().to_str().unwrap().to_string();
ext == "proto".to_string()
ext == *"proto"
}

pub fn is_hidden(entry: &walkdir::DirEntry) -> bool {
entry
.file_name()
.to_str()
.map(|s| s.starts_with("."))
.map(|s| s.starts_with('.'))
.unwrap_or(false)
}
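Two small allocation fixes appear above: comparing against `"Cargo".to_string()` is replaced by a plain string comparison (clippy's `cmp_owned`), and one-character string patterns such as `starts_with(".")` become `char` patterns (`single_char_pattern`). Sketch:

    fn is_cargo_stem(stem: &str) -> bool {
        // Compare &str with &str; no temporary String is allocated.
        stem == "Cargo"
    }

    fn is_hidden(file_name: &str) -> bool {
        // A char pattern avoids building a one-byte &str pattern.
        file_name.starts_with('.')
    }

    fn main() {
        assert!(is_cargo_stem("Cargo"));
        assert!(is_hidden(".git"));
        assert!(!is_hidden("src"));
        println!("ok");
    }

The commit's own form, `cargo == *"Cargo"`, compares the owned `String` against a dereferenced `str` to the same effect.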
@ -1,7 +1,7 @@
pub const HOST: &'static str = "localhost:8000";
pub const HTTP_SCHEMA: &'static str = "http";
pub const WS_SCHEMA: &'static str = "ws";
pub const HEADER_TOKEN: &'static str = "token";
pub const HOST: &str = "localhost:8000";
pub const HTTP_SCHEMA: &str = "http";
pub const WS_SCHEMA: &str = "ws";
pub const HEADER_TOKEN: &str = "token";

#[derive(Debug, Clone)]
pub struct ServerConfig {
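As with the table-name constants earlier in the commit, the explicit `'static` is dropped from these `&'static str` constants; `const` and `static` items already carry the `'static` lifetime, which is what clippy's `redundant_static_lifetimes` lint points out. Sketch:

    // The lifetime is implicit on a const item; `&str` is enough.
    pub const HOST: &str = "localhost:8000";
    pub const HTTP_SCHEMA: &str = "http";

    fn main() {
        println!("{}://{}", HTTP_SCHEMA, HOST);
    }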
@ -1,3 +1,4 @@
#![allow(clippy::module_inception)]
mod request;

pub use request::*;

@ -23,8 +23,8 @@ pub struct HttpRequestBuilder {
middleware: Vec<Arc<dyn ResponseMiddleware + Send + Sync>>,
}

impl HttpRequestBuilder {
pub fn new() -> Self {
impl std::default::Default for HttpRequestBuilder {
fn default() -> Self {
Self {
url: "".to_owned(),
body: None,

@ -34,6 +34,10 @@ impl HttpRequestBuilder {
middleware: Vec::new(),
}
}
}

impl HttpRequestBuilder {
pub fn new() -> Self { HttpRequestBuilder::default() }

pub fn middleware<T>(mut self, middleware: Arc<T>) -> Self
where
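Rather than leaving a bare `new()`, the builder now implements `Default` and has `new()` delegate to it, which satisfies clippy's `new_without_default` lint and lets the type work with APIs that expect `Default`. A reduced sketch (fields invented):

    pub struct RequestBuilder {
        url: String,
        body: Option<Vec<u8>>,
    }

    impl Default for RequestBuilder {
        fn default() -> Self {
            Self {
                url: "".to_owned(),
                body: None,
            }
        }
    }

    impl RequestBuilder {
        pub fn new() -> Self { RequestBuilder::default() }
    }

    fn main() {
        let builder = RequestBuilder::new();
        println!("url = {:?}, body set = {}", builder.url, builder.body.is_some());
    }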
@ -1,3 +1,4 @@
#![allow(clippy::module_inception)]
mod response;

#[cfg(feature = "http_server")]

@ -118,7 +118,7 @@ impl<'a> ASTField<'a> {
Some(inner) => {
match inner.primitive_ty {
PrimitiveTy::Map(map_info) => {
bracket_category = Some(BracketCategory::Map((map_info.key.clone(), map_info.value.clone())))
bracket_category = Some(BracketCategory::Map((map_info.key.clone(), map_info.value)))
},
PrimitiveTy::Vec => {
bracket_category = Some(BracketCategory::Vec);

@ -170,7 +170,7 @@ impl<'a> ASTField<'a> {
#[allow(dead_code)]
pub fn name(&self) -> Option<syn::Ident> {
if let syn::Member::Named(ident) = &self.member {
return Some(ident.clone());
Some(ident.clone())
} else {
None
}

@ -205,7 +205,7 @@ pub fn enum_from_ast<'a>(
cx: &Ctxt,
ident: &syn::Ident,
variants: &'a Punctuated<syn::Variant, Token![,]>,
enum_attrs: &Vec<syn::Attribute>,
enum_attrs: &[syn::Attribute],
) -> Vec<ASTEnumVariant<'a>> {
variants
.iter()
@ -1,5 +1,5 @@
#![allow(clippy::all)]
use crate::{symbol::*, Ctxt};

use quote::ToTokens;
use syn::{
self,

@ -194,7 +194,7 @@ impl ASTAttrField {
}

ASTAttrField {
name: ident.to_string().clone(),
name: ident,
pb_index: pb_index.get(),
pb_one_of: pb_one_of.get(),
skip_serializing: skip_serializing.get(),

@ -205,12 +205,7 @@ impl ASTAttrField {
}

#[allow(dead_code)]
pub fn pb_index(&self) -> Option<String> {
match self.pb_index {
Some(ref lit) => Some(lit.base10_digits().to_string()),
None => None,
}
}
pub fn pb_index(&self) -> Option<String> { self.pb_index.as_ref().map(|lit| lit.base10_digits().to_string()) }

pub fn is_one_of(&self) -> bool { self.pb_one_of }
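The hand-written `match self.pb_index { Some(ref lit) => Some(...), None => None }` collapses to `self.pb_index.as_ref().map(...)`; mapping over an `Option` is what clippy's `manual_map` family of lints nudges toward. Sketch:

    struct Field {
        pb_index: Option<i64>,
    }

    impl Field {
        fn pb_index(&self) -> Option<String> {
            // `as_ref()` borrows the Option's contents; `map` rewraps the result,
            // replacing the explicit Some/None match.
            self.pb_index.as_ref().map(|index| index.to_string())
        }
    }

    fn main() {
        let field = Field { pb_index: Some(3) };
        println!("{:?}", field.pb_index());
        println!("{:?}", Field { pb_index: None }.pb_index());
    }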
@ -249,21 +244,18 @@ pub struct ASTEnumAttrVariant {
}

impl ASTEnumAttrVariant {
pub fn from_ast(ctxt: &Ctxt, ident: &syn::Ident, variant: &syn::Variant, enum_attrs: &Vec<syn::Attribute>) -> Self {
pub fn from_ast(ctxt: &Ctxt, ident: &syn::Ident, variant: &syn::Variant, enum_attrs: &[syn::Attribute]) -> Self {
let enum_item_name = variant.ident.to_string();
let enum_name = ident.to_string();
let mut value = String::new();
if variant.discriminant.is_some() {
match variant.discriminant.as_ref().unwrap().1 {
syn::Expr::Lit(ref expr_list) => {
let lit_int = if let syn::Lit::Int(ref int_value) = expr_list.lit {
int_value
} else {
unimplemented!()
};
value = lit_int.base10_digits().to_string();
},
_ => {},
if let syn::Expr::Lit(ref expr_list) = variant.discriminant.as_ref().unwrap().1 {
let lit_int = if let syn::Lit::Int(ref int_value) = expr_list.lit {
int_value
} else {
unimplemented!()
};
value = lit_int.base10_digits().to_string();
}
}
let event_attrs = get_event_attrs_from(ctxt, &variant.attrs, enum_attrs);

@ -282,11 +274,7 @@ impl ASTEnumAttrVariant {
pub fn event_error(&self) -> String { self.event_attrs.error_ty.as_ref().unwrap().clone() }
}

fn get_event_attrs_from(
ctxt: &Ctxt,
variant_attrs: &Vec<syn::Attribute>,
enum_attrs: &Vec<syn::Attribute>,
) -> EventAttrs {
fn get_event_attrs_from(ctxt: &Ctxt, variant_attrs: &[syn::Attribute], enum_attrs: &[syn::Attribute]) -> EventAttrs {
let mut event_attrs = EventAttrs {
input: None,
output: None,

@ -296,7 +284,7 @@ fn get_event_attrs_from(

enum_attrs
.iter()
.filter(|attr| attr.path.segments.iter().find(|s| s.ident == EVENT_ERR).is_some())
.filter(|attr| attr.path.segments.iter().any(|s| s.ident == EVENT_ERR))
.for_each(|attr| {
if let Ok(NameValue(named_value)) = attr.parse_meta() {
if let syn::Lit::Str(s) = named_value.lit {

@ -357,13 +345,12 @@ fn get_event_attrs_from(
}

// eprintln!("😁{:#?}", event_attrs);

event_attrs
}

pub fn get_meta_items(cx: &Ctxt, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
if attr.path != PB_ATTRS && attr.path != EVENT {
return Ok(Vec::new());
return Ok(vec![]);
}

// http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
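`.find(|s| ...).is_some()` on an iterator becomes `.any(|s| ...)`; clippy's `search_is_some` lint points out that `any` states the intent directly and stops at the first match just the same. Sketch:

    fn has_error_segment(segments: &[&str]) -> bool {
        // `any` returns true as soon as one element matches.
        segments.iter().any(|s| *s == "event_err")
    }

    fn main() {
        assert!(has_error_segment(&["event", "event_err"]));
        assert!(!has_error_segment(&["event"]));
        println!("ok");
    }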
@ -438,7 +425,7 @@ fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
}

fn default_pb_type(ctxt: &Ctxt, ident: &syn::Ident) -> syn::Type {
let take_ident = format!("{}", ident.to_string());
let take_ident = ident.to_string();
let lit_str = syn::LitStr::new(&take_ident, ident.span());
if let Ok(tokens) = spanned_tokens(&lit_str) {
if let Ok(pb_struct_ty) = syn::parse2(tokens) {

@ -1,6 +1,5 @@
use quote::ToTokens;
use std::{cell::RefCell, fmt::Display, thread};
use syn;

#[derive(Default)]
pub struct Ctxt {
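`format!("{}", ident.to_string())` shrinks to `ident.to_string()`; clippy's `useless_format` lint flags `format!` calls whose only job is to turn a single value into a `String`. The same commit rewrites several error messages from `format!("...")` to `"...".to_string()` for the same reason. Sketch:

    fn main() {
        let ident = "Workspace";
        // A plain conversion; format! adds nothing here.
        let take_ident = ident.to_string();
        // Constant messages need no formatting machinery either.
        let message = "Unsupported inner type".to_string();
        println!("{} / {}", take_ident, message);
    }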
@ -17,7 +17,7 @@ impl EventASTContext {
}

let event = format_ident!("{}", &command_name);
let splits = command_name.split("_").collect::<Vec<&str>>();
let splits = command_name.split('_').collect::<Vec<&str>>();

let event_ty = format_ident!("{}", variant.enum_name);
let event_request_struct = format_ident!("{}Event", &splits.join(""));
@ -72,7 +72,7 @@ pub fn parse_ty<'a>(ctxt: &Ctxt, ty: &'a syn::Type) -> Option<TyInfo<'a>> {
});
};
}
ctxt.error_spanned_by(ty, format!("Unsupported inner type, get inner type fail"));
ctxt.error_spanned_by(ty, "Unsupported inner type, get inner type fail".to_string());
None
}

@ -104,12 +104,12 @@ pub fn generate_hashmap_ty_info<'a>(
let key = parse_ty(ctxt, types[0]).unwrap().ident.to_string();
let value = parse_ty(ctxt, types[1]).unwrap().ident.to_string();
let bracket_ty_info = Box::new(parse_ty(ctxt, &types[1]));
return Some(TyInfo {
Some(TyInfo {
ident: &path_segment.ident,
ty,
primitive_ty: PrimitiveTy::Map(MapInfo::new(key, value)),
bracket_ty_info,
});
})
}

fn generate_option_ty_info<'a>(

@ -121,12 +121,12 @@ fn generate_option_ty_info<'a>(
assert_eq!(path_segment.ident.to_string(), "Option".to_string());
let types = parse_bracketed(bracketed);
let bracket_ty_info = Box::new(parse_ty(ctxt, &types[0]));
return Some(TyInfo {
Some(TyInfo {
ident: &path_segment.ident,
ty,
primitive_ty: PrimitiveTy::Opt,
bracket_ty_info,
});
})
}

fn generate_vec_ty_info<'a>(

@ -146,5 +146,5 @@ fn generate_vec_ty_info<'a>(
bracket_ty_info: bracketed_ty_info,
});
}
return None;
None
}
@ -1,3 +1,4 @@
#![allow(clippy::module_inception)]
mod derive_cache;

pub use derive_cache::*;

@ -38,8 +38,7 @@ pub fn make_de_token_steam(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStrea
Ok(o)
}
}
}
.into();
};

Some(de_token_stream)
// None
@ -20,14 +20,12 @@ pub fn expand_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::E

let mut token_stream: TokenStream = TokenStream::default();

let de_token_stream = make_de_token_steam(&ctxt, &cont);
if de_token_stream.is_some() {
token_stream.extend(de_token_stream.unwrap());
if let Some(de_token_stream) = make_de_token_steam(&ctxt, &cont) {
token_stream.extend(de_token_stream);
}

let se_token_stream = make_se_token_stream(&ctxt, &cont);
if se_token_stream.is_some() {
token_stream.extend(se_token_stream.unwrap());
if let Some(se_token_stream) = make_se_token_stream(&ctxt, &cont) {
token_stream.extend(se_token_stream);
}

ctxt.check()?;

@ -43,9 +41,8 @@ pub fn expand_enum_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<s

let mut token_stream: TokenStream = TokenStream::default();

let enum_token_stream = make_enum_token_stream(&ctxt, &cont);
if enum_token_stream.is_some() {
token_stream.extend(enum_token_stream.unwrap());
if let Some(enum_token_stream) = make_enum_token_stream(&ctxt, &cont) {
token_stream.extend(enum_token_stream);
}

ctxt.check()?;
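The `if x.is_some() { ... x.unwrap() ... }` blocks become `if let Some(x) = ...`, which removes the `unwrap()` and the double check; this is the rewrite clippy suggests through its `unnecessary_unwrap` lint. Sketch:

    fn make_token(input: &str) -> Option<String> {
        if input.is_empty() { None } else { Some(format!("token({})", input)) }
    }

    fn main() {
        let mut output = String::new();
        // Bind and test in one step; no is_some()/unwrap() pair.
        if let Some(token) = make_token("derive") {
            output.push_str(&token);
        }
        println!("{}", output);
    }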
@ -35,8 +35,7 @@ pub fn make_se_token_stream(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStre
Ok(pb)
}
}
}
.into();
};

Some(se_token_stream)
}
@ -10,13 +10,13 @@ pub(crate) fn get_member_ident<'a>(ctxt: &Ctxt, member: &'a syn::Member) -> Opti
if let syn::Member::Named(ref ident) = member {
Some(ident)
} else {
ctxt.error_spanned_by(member, format!("Unsupported member, shouldn't be self.0"));
ctxt.error_spanned_by(member, "Unsupported member, shouldn't be self.0".to_string());
None
}
}

pub fn assert_bracket_ty_is_some(ctxt: &Ctxt, ty_info: &TyInfo) {
if ty_info.bracket_ty_info.is_none() {
ctxt.error_spanned_by(ty_info.ty, format!("Invalid bracketed type when gen de token steam"));
ctxt.error_spanned_by(ty_info.ty, "Invalid bracketed type when gen de token steam".to_string());
}
}
@ -125,7 +125,7 @@ impl Document {
pub fn format(&mut self, interval: Interval, attribute: Attribute) -> Result<Delta, DocumentError> {
let _ = validate_interval(&self.delta, &interval)?;
tracing::trace!("format with {} at {}", attribute, interval);
let format_delta = self.view.format(&self.delta, attribute.clone(), interval).unwrap();
let format_delta = self.view.format(&self.delta, attribute, interval).unwrap();

tracing::trace!("👉 receive change: {}", format_delta);
self.compose_delta(format_delta.clone())?;

@ -54,7 +54,7 @@ impl InsertExt for PreserveBlockFormatOnInsert {
new_delta.retain(offset, plain_attributes());
let len = newline_op.get_data().find(NEW_LINE).unwrap();
new_delta.retain(len, plain_attributes());
new_delta.retain(1, reset_attribute.clone());
new_delta.retain(1, reset_attribute);
}

return Some(new_delta);

@ -44,7 +44,7 @@ impl InsertExt for PreserveInlineFormat {
.insert_with_attributes(text, attributes)
.build();

return Some(new_delta);
Some(new_delta)
}
}

@ -73,7 +73,7 @@ impl InsertExt for PreserveLineFormatOnSplit {
new_delta.retain(index + replace_len, plain_attributes());

if newline_status.is_contain() {
debug_assert!(next.has_attribute() == false);
debug_assert!(!next.has_attribute());
new_delta.insert(NEW_LINE, plain_attributes());
return Some(new_delta);
}
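Two of the changes above are about the last use of a value: `attribute.clone()` and `reset_attribute.clone()` become plain moves because nothing reads the originals afterwards (the pattern clippy's `redundant_clone` lint looks for), and `debug_assert!(x == false)` becomes `debug_assert!(!x)` (`bool_comparison`). Sketch:

    struct Attribute {
        name: String,
    }

    fn apply(attribute: Attribute) -> String {
        format!("applied {}", attribute.name)
    }

    fn main() {
        let attribute = Attribute { name: "bold".to_string() };
        let applied = apply(attribute); // last use: move it, don't clone it
        let failed = false;
        debug_assert!(!failed); // negation instead of `failed == false`
        println!("{}", applied);
    }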
@ -26,8 +26,8 @@ pub struct History {
capacity: usize,
}

impl History {
pub fn new() -> Self {
impl std::default::Default for History {
fn default() -> Self {
History {
cur_undo: 1,
undos: Vec::new(),

@ -35,6 +35,10 @@ impl History {
capacity: MAX_UNDOS,
}
}
}

impl History {
pub fn new() -> Self { History::default() }

pub fn can_undo(&self) -> bool { !self.undos.is_empty() }
@ -1,3 +1,4 @@
#![allow(clippy::module_inception)]
mod doc;
pub mod parser;
mod revision;
@ -4,7 +4,7 @@ pub struct DocId(pub String);
impl DocId {
pub fn parse(s: String) -> Result<DocId, String> {
if s.trim().is_empty() {
return Err(format!("Doc id can not be empty or whitespace"));
return Err("Doc id can not be empty or whitespace".to_string());
}

Ok(Self(s))
@ -45,8 +45,8 @@ impl AsRef<i64> for RevId {
fn as_ref(&self) -> &i64 { &self.value }
}

impl std::convert::Into<i64> for RevId {
fn into(self) -> i64 { self.value }
impl std::convert::From<RevId> for i64 {
fn from(rev_id: RevId) -> Self { rev_id.value }
}

impl std::convert::From<i64> for RevId {
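`impl Into<i64> for RevId` is rewritten as `impl From<RevId> for i64`; clippy's `from_over_into` lint prefers `From` because the standard library's blanket impl then provides `Into` for free, and `From` works in more contexts (`i64::from`, `?` conversions). Sketch with a stand-in type:

    struct RevId {
        value: i64,
    }

    impl From<RevId> for i64 {
        fn from(rev_id: RevId) -> Self { rev_id.value }
    }

    fn main() {
        let rev_id = RevId { value: 7 };
        let as_i64: i64 = rev_id.into(); // Into<i64> comes from the blanket impl
        println!("{}", i64::from(RevId { value: 8 }) + as_i64);
    }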
@ -129,14 +129,7 @@ impl Revision {

pub fn revision_from_doc(doc: Doc, ty: RevType) -> Revision {
let delta_data = doc.data.as_bytes();
let revision = Revision::new(
doc.base_rev_id.clone(),
doc.rev_id.clone(),
delta_data.to_owned(),
&doc.id,
ty,
);
revision
Revision::new(doc.base_rev_id, doc.rev_id, delta_data.to_owned(), &doc.id, ty)
}

#[derive(Debug, Clone, Default, ProtoBuf)]

@ -161,6 +154,8 @@ impl RevisionRange {
}
}

pub fn is_empty(&self) -> bool { self.end == self.start }

pub fn iter(&self) -> RangeInclusive<i64> {
debug_assert!(self.start != self.end);
RangeInclusive::new(self.start, self.end)
@ -1,3 +1,4 @@
#![allow(clippy::module_inception)]
mod ws;

pub use ws::*;
@ -9,8 +9,7 @@ pub fn doc_initial_string() -> String { doc_initial_delta().to_json() }
#[inline]
pub fn initial_read_me() -> Delta {
let json = include_str!("READ_ME.json");
let delta = Delta::from_json(json).unwrap();
delta
Delta::from_json(json).unwrap()
}

#[cfg(test)]
@ -2,12 +2,7 @@ use lib_ot::core::{NEW_LINE, WHITESPACE};
use std::sync::atomic::{AtomicI64, Ordering::SeqCst};

#[inline]
pub fn find_newline(s: &str) -> Option<usize> {
match s.find(NEW_LINE) {
None => None,
Some(line_break) => Some(line_break),
}
}
pub fn find_newline(s: &str) -> Option<usize> { s.find(NEW_LINE) }

#[inline]
pub fn is_newline(s: &str) -> bool { s == NEW_LINE }
@ -68,7 +68,7 @@ mod tests {

#[test]
fn names_containing_an_invalid_character_are_rejected() {
for name in vec!['/', '(', ')', '"', '<', '>', '\\', '{', '}'] {
for name in &['/', '(', ')', '"', '<', '>', '\\', '{', '}'] {
let name = name.to_string();
assert_err!(UserName::parse(name));
}
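The test loop stops allocating a `Vec` just to iterate over fixed items and borrows an array instead (`for name in &[...]`), the fix for clippy's `useless_vec` lint. Sketch:

    fn main() {
        // Iterating a borrowed array allocates nothing.
        for name in &['/', '(', ')', '"'] {
            let name = name.to_string();
            println!("rejecting {:?}", name);
        }
    }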
@ -5,7 +5,7 @@ impl UserWorkspace {
pub fn parse(s: String) -> Result<UserWorkspace, String> {
let is_empty_or_whitespace = s.trim().is_empty();
if is_empty_or_whitespace {
return Err(format!("workspace id is empty or whitespace"));
return Err("workspace id is empty or whitespace".to_string());
}
Ok(Self(s))
}
@ -99,9 +99,7 @@ pub struct App {
}

impl App {
pub fn take_belongings(&mut self) -> RepeatedView {
::std::mem::replace(&mut self.belongings, RepeatedView::default())
}
pub fn take_belongings(&mut self) -> RepeatedView { std::mem::take(&mut self.belongings) }
}

#[derive(PartialEq, Debug, Default, ProtoBuf, Clone)]
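`std::mem::replace(&mut self.belongings, RepeatedView::default())` becomes `std::mem::take(&mut self.belongings)`; for types that implement `Default`, `mem::take` is the shorthand clippy's `mem_replace_with_default` lint recommends. Sketch with a stand-in field type:

    #[derive(Default, Debug)]
    struct RepeatedView(Vec<String>);

    struct App {
        belongings: RepeatedView,
    }

    impl App {
        // Moves the current value out and leaves RepeatedView::default() behind.
        fn take_belongings(&mut self) -> RepeatedView { std::mem::take(&mut self.belongings) }
    }

    fn main() {
        let mut app = App { belongings: RepeatedView(vec!["view".to_string()]) };
        let taken = app.take_belongings();
        println!("taken {:?}, left {:?}", taken, app.belongings);
    }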
@ -79,7 +79,7 @@ impl TryInto<UpdateAppParams> for UpdateAppRequest {

let color_style = match self.color_style {
None => None,
Some(color_style) => Some(AppColorStyle::parse(color_style.theme_color.clone())?.into()),
Some(color_style) => Some(AppColorStyle::parse(color_style.theme_color)?.into()),
};

Ok(UpdateAppParams {