Mirror of https://github.com/AppFlowy-IO/AppFlowy.git, synced 2024-08-30 18:12:39 +00:00
fix clippy warnings
parent 8d9cde17a9
commit 3abd5b953e
@@ -1,6 +1,6 @@
 use flowy_document_infra::protobuf::Doc;
 
-pub(crate) const DOC_TABLE: &'static str = "doc_table";
+pub(crate) const DOC_TABLE: &str = "doc_table";
 
 #[derive(Debug, Clone, sqlx::FromRow)]
 pub struct DocTable {
@@ -2,10 +2,10 @@ use chrono::Utc;
 use flowy_workspace_infra::protobuf::{App, RepeatedView, Trash, TrashType, View, ViewType, Workspace};
 use protobuf::ProtobufEnum;
 
-pub(crate) const WORKSPACE_TABLE: &'static str = "workspace_table";
-pub(crate) const APP_TABLE: &'static str = "app_table";
-pub(crate) const VIEW_TABLE: &'static str = "view_table";
-pub(crate) const TRASH_TABLE: &'static str = "trash_table";
+pub(crate) const WORKSPACE_TABLE: &str = "workspace_table";
+pub(crate) const APP_TABLE: &str = "app_table";
+pub(crate) const VIEW_TABLE: &str = "view_table";
+pub(crate) const TRASH_TABLE: &str = "trash_table";
 
 #[derive(Debug, Clone, sqlx::FromRow)]
 pub struct WorkspaceTable {
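For context: the `&'static str` -> `&str` edits in these hunks follow clippy's `redundant_static_lifetimes` lint, since `const` and `static` items already have the `'static` lifetime. A minimal sketch of the pattern (hypothetical constant names, not from this commit):

#[allow(dead_code, clippy::redundant_static_lifetimes)]
mod sketch {
    // Old spelling: the explicit 'static is redundant and triggers the lint.
    const TABLE_OLD: &'static str = "doc_table";
    // New spelling preferred by clippy.
    const TABLE_NEW: &str = "doc_table";
}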
@@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 pub mod app;
 pub mod router;
 
@@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 pub(crate) use crud::*;
 pub use router::*;
 
@@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 pub mod router;
 mod trash;
 
@@ -8,7 +8,7 @@ use backend_service::{
 errors::{invalid_params, ErrorCode, ServerError},
 response::FlowyResponse,
 };
-use chrono::Utc;
 use flowy_user_infra::{
 parser::{UserEmail, UserName, UserPassword},
 protobuf::{SignInParams, SignInResponse, SignUpParams, SignUpResponse, UpdateUserParams, UserProfile},
@@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 pub mod router;
 pub mod sql_builder;
 mod view;
@@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 pub mod router;
 pub mod sql_builder;
 mod workspace;
@@ -1,3 +1,4 @@
+#![allow(clippy::not_unsafe_ptr_arg_deref)]
 mod c;
 mod model;
 mod protobuf;
@@ -28,7 +29,7 @@ pub extern "C" fn init_sdk(path: *mut c_char) -> i64 {
 let config = FlowySDKConfig::new(path, server_config, "appflowy").log_filter("debug");
 *FLOWY_SDK.write() = Some(Arc::new(FlowySDK::new(config)));
 
-return 1;
+0
 }
 
 #[no_mangle]
@@ -62,7 +63,7 @@ pub extern "C" fn sync_command(input: *const u8, len: usize) -> *const u8 {
 #[no_mangle]
 pub extern "C" fn set_stream_port(port: i64) -> i32 {
 dart_notify::dart::DartStreamSender::set_port(port);
-return 0;
+0
 }
 
 #[inline(never)]
@@ -21,6 +21,6 @@ impl FFIRequest {
 }
 }
 
-impl std::convert::Into<ModuleRequest> for FFIRequest {
-fn into(self) -> ModuleRequest { ModuleRequest::new(self.event).payload(self.payload) }
+impl std::convert::From<FFIRequest> for ModuleRequest {
+fn from(ffi_request: FFIRequest) -> Self { ModuleRequest::new(ffi_request.event).payload(ffi_request.payload) }
 }
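For context: the `Into` -> `From` rewrites throughout this commit follow clippy's `from_over_into` lint. Implementing `From<A> for B` also provides `Into<B> for A` via the standard library's blanket implementation, so only `From` needs to be written. A minimal sketch with hypothetical types (not from this commit):

struct FfiEvent(String);
struct Request(String);

impl From<FfiEvent> for Request {
    // From is the canonical direction; Into comes for free.
    fn from(e: FfiEvent) -> Self { Request(e.0) }
}

fn demo() {
    let r: Request = FfiEvent("event".to_string()).into(); // uses the blanket Into impl
    let _ = r;
}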
@@ -54,15 +54,9 @@ impl DartNotifyBuilder {
 }
 
 pub fn send(self) {
-let payload = match self.payload {
-None => None,
-Some(bytes) => Some(bytes.to_vec()),
-};
+let payload = self.payload.map(|bytes| bytes.to_vec());
 
-let error = match self.error {
-None => None,
-Some(bytes) => Some(bytes.to_vec()),
-};
+let error = self.error.map(|bytes| bytes.to_vec());
 
 let subject = SubscribeObject {
 source: self.source,
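For context: the `send` change above replaces a `match` that only re-wraps `Some`/`None` with `Option::map`, the form clippy's `manual_map` lint suggests. A minimal sketch, assuming a byte-buffer payload:

fn payload_to_vec(payload: Option<&[u8]>) -> Option<Vec<u8>> {
    // Equivalent to matching on Some(bytes)/None and re-wrapping the result.
    payload.map(|bytes| bytes.to_vec())
}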
@@ -30,7 +30,7 @@ impl FlowyDocument {
 server_config: &ServerConfig,
 ) -> FlowyDocument {
 let server = construct_doc_server(server_config);
-let doc_ctrl = Arc::new(DocController::new(server.clone(), user.clone(), ws_manager.clone()));
+let doc_ctrl = Arc::new(DocController::new(server, user.clone(), ws_manager));
 Self { doc_ctrl, user }
 }
 
@@ -1,13 +1,13 @@
 use dart_notify::DartNotifyBuilder;
 use flowy_derive::ProtoBuf_Enum;
-const OBSERVABLE_CATEGORY: &'static str = "Doc";
+const OBSERVABLE_CATEGORY: &str = "Doc";
 #[derive(ProtoBuf_Enum, Debug)]
 pub(crate) enum DocObservable {
 UserCreateDoc = 0,
 }
 
-impl std::convert::Into<i32> for DocObservable {
-fn into(self) -> i32 { self as i32 }
+impl std::convert::From<DocObservable> for i32 {
+fn from(o: DocObservable) -> Self { o as i32 }
 }
 
 #[allow(dead_code)]
@@ -28,13 +28,12 @@ pub(crate) struct DocController {
 impl DocController {
 pub(crate) fn new(server: Server, user: Arc<dyn DocumentUser>, ws: Arc<WsDocumentManager>) -> Self {
 let cache = Arc::new(DocCache::new());
-let controller = Self {
+Self {
 server,
 user,
 ws_manager: ws,
-cache: cache.clone(),
-};
-controller
+cache,
+}
 }
 
 pub(crate) fn init(&self) -> DocResult<()> {
@@ -47,7 +46,7 @@ impl DocController {
 params: DocIdentifier,
 pool: Arc<ConnectionPool>,
 ) -> Result<Arc<ClientEditDoc>, DocError> {
-if self.cache.contains(&params.doc_id) == false {
+if !self.cache.contains(&params.doc_id) {
 let edit_ctx = self.make_edit_context(&params.doc_id, pool.clone()).await?;
 return Ok(edit_ctx);
 }
@@ -65,7 +65,7 @@ impl RevisionManager {
 pub fn update_rev_id_counter_value(&self, rev_id: i64) { self.rev_id_counter.set(rev_id); }
 
 pub async fn mk_revisions(&self, range: RevisionRange) -> Result<Revision, DocError> {
-debug_assert!(&range.doc_id == &self.doc_id);
+debug_assert!(range.doc_id == self.doc_id);
 let revisions = self.rev_store.revs_in_range(range.clone()).await?;
 let mut new_delta = Delta::new();
 for revision in revisions {
|
|||||||
Ok(delta) => {
|
Ok(delta) => {
|
||||||
new_delta = new_delta.compose(&delta)?;
|
new_delta = new_delta.compose(&delta)?;
|
||||||
},
|
},
|
||||||
Err(_) => {},
|
Err(e) => log::error!("{}", e),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -64,12 +64,11 @@ impl RevisionStore {
 let revs_map = self.revs_map.clone();
 let mut rx = sender.subscribe();
 tokio::spawn(async move {
-match rx.recv().await {
-Ok(rev_id) => match revs_map.get_mut(&rev_id) {
+if let Ok(rev_id) = rx.recv().await {
+match revs_map.get_mut(&rev_id) {
 None => {},
 Some(mut rev) => rev.value_mut().state = RevState::Acked,
-},
-Err(_) => {},
+}
 }
 });
 
@@ -107,7 +106,7 @@ impl RevisionStore {
 
 *self.defer_save.write().await = Some(tokio::spawn(async move {
 tokio::time::sleep(Duration::from_millis(300)).await;
-let ids = revs_map.iter().map(|kv| kv.key().clone()).collect::<Vec<i64>>();
+let ids = revs_map.iter().map(|kv| *kv.key()).collect::<Vec<i64>>();
 let revisions_state = revs_map
 .iter()
 .map(|kv| (kv.revision.clone(), kv.state))
|
|||||||
None => {},
|
None => {},
|
||||||
Some(op) => {
|
Some(op) => {
|
||||||
let data = op.get_data();
|
let data = op.get_data();
|
||||||
if !data.ends_with("\n") {
|
if !data.ends_with('\n') {
|
||||||
delta.ops.push(Operation::Insert("\n".into()))
|
delta.ops.push(Operation::Insert("\n".into()))
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -232,7 +231,7 @@ fn validate_delta(doc_id: &str, persistence: Arc<Persistence>, conn: &SqliteConn
|
|||||||
}
|
}
|
||||||
|
|
||||||
let data = delta.ops.last().as_ref().unwrap().get_data();
|
let data = delta.ops.last().as_ref().unwrap().get_data();
|
||||||
if !data.ends_with("\n") {
|
if !data.ends_with('\n') {
|
||||||
log::error!("The op must end with newline");
|
log::error!("The op must end with newline");
|
||||||
let result = || {
|
let result = || {
|
||||||
let revisions = persistence.rev_sql.read_rev_tables(&doc_id, conn)?;
|
let revisions = persistence.rev_sql.read_rev_tables(&doc_id, conn)?;
|
||||||
|
@@ -58,15 +58,10 @@ impl WsDocumentManager {
 fn listen_ws_state_changed(ws: Arc<dyn DocumentWebSocket>, handlers: Arc<DashMap<String, Arc<dyn WsDocumentHandler>>>) {
 let mut notify = ws.state_notify();
 tokio::spawn(async move {
-loop {
-match notify.recv().await {
-Ok(state) => {
-handlers.iter().for_each(|handle| {
-handle.value().state_changed(&state);
-});
-},
-Err(_) => break,
-}
+while let Ok(state) = notify.recv().await {
+handlers.iter().for_each(|handle| {
+handle.value().state_changed(&state);
+});
 }
 });
 }
@@ -46,23 +46,23 @@ impl RevState {
 }
 impl_sql_integer_expression!(RevState);
 
-impl std::convert::Into<Revision> for RevTable {
-fn into(self) -> Revision {
-let md5 = md5(&self.data);
+impl std::convert::From<RevTable> for Revision {
+fn from(table: RevTable) -> Self {
+let md5 = md5(&table.data);
 Revision {
-base_rev_id: self.base_rev_id,
-rev_id: self.rev_id,
-delta_data: self.data,
+base_rev_id: table.base_rev_id,
+rev_id: table.rev_id,
+delta_data: table.data,
 md5,
-doc_id: self.doc_id,
-ty: self.ty.into(),
+doc_id: table.doc_id,
+ty: table.ty.into(),
 }
 }
 }
 
-impl std::convert::Into<RevTableType> for RevType {
-fn into(self) -> RevTableType {
-match self {
+impl std::convert::From<RevType> for RevTableType {
+fn from(ty: RevType) -> Self {
+match ty {
 RevType::Local => RevTableType::Local,
 RevType::Remote => RevTableType::Remote,
 }
@@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 mod attribute_test;
 mod op_test;
 mod serde_test;
@@ -9,7 +10,7 @@ use lib_ot::core::*;
 use rand::{prelude::*, Rng as WrappedRng};
 use std::{sync::Once, time::Duration};
 
-const LEVEL: &'static str = "debug";
+const LEVEL: &str = "debug";
 
 #[derive(Clone, Debug, Display)]
 pub enum TestOp {
@@ -1,3 +1,4 @@
+#![allow(clippy::all)]
 use crate::editor::{Rng, TestBuilder, TestOp::*};
 use flowy_document_infra::core::{FlowyDoc, PlainDoc};
 use lib_ot::core::*;
@@ -229,8 +230,8 @@ fn delta_seek_4() {
 fn delta_seek_5() {
 let mut delta = Delta::default();
 let attributes = AttributeBuilder::new()
-.add(Attribute::Bold(true))
-.add(Attribute::Italic(true))
+.add_attr(Attribute::Bold(true))
+.add_attr(Attribute::Italic(true))
 .build();
 
 delta.add(OpBuilder::insert("1234").attributes(attributes.clone()).build());
|
|||||||
fn transform_with_two_delta_test() {
|
fn transform_with_two_delta_test() {
|
||||||
let mut a = Delta::default();
|
let mut a = Delta::default();
|
||||||
let mut a_s = String::new();
|
let mut a_s = String::new();
|
||||||
a.insert("123", AttributeBuilder::new().add(Attribute::Bold(true)).build());
|
a.insert("123", AttributeBuilder::new().add_attr(Attribute::Bold(true)).build());
|
||||||
a_s = a.apply(&a_s).unwrap();
|
a_s = a.apply(&a_s).unwrap();
|
||||||
assert_eq!(&a_s, "123");
|
assert_eq!(&a_s, "123");
|
||||||
|
|
||||||
|
@@ -4,8 +4,8 @@ use lib_ot::core::*;
 #[test]
 fn operation_insert_serialize_test() {
 let attributes = AttributeBuilder::new()
-.add(Attribute::Bold(true))
-.add(Attribute::Italic(true))
+.add_attr(Attribute::Bold(true))
+.add_attr(Attribute::Italic(true))
 .build();
 let operation = OpBuilder::insert("123").attributes(attributes).build();
 let json = serde_json::to_string(&operation).unwrap();
|
|||||||
#[test]
|
#[test]
|
||||||
fn attributes_serialize_test() {
|
fn attributes_serialize_test() {
|
||||||
let attributes = AttributeBuilder::new()
|
let attributes = AttributeBuilder::new()
|
||||||
.add(Attribute::Bold(true))
|
.add_attr(Attribute::Bold(true))
|
||||||
.add(Attribute::Italic(true))
|
.add_attr(Attribute::Italic(true))
|
||||||
.build();
|
.build();
|
||||||
let retain = OpBuilder::insert("123").attributes(attributes).build();
|
let retain = OpBuilder::insert("123").attributes(attributes).build();
|
||||||
|
|
||||||
@ -49,8 +49,8 @@ fn delta_serialize_multi_attribute_test() {
|
|||||||
let mut delta = Delta::default();
|
let mut delta = Delta::default();
|
||||||
|
|
||||||
let attributes = AttributeBuilder::new()
|
let attributes = AttributeBuilder::new()
|
||||||
.add(Attribute::Bold(true))
|
.add_attr(Attribute::Bold(true))
|
||||||
.add(Attribute::Italic(true))
|
.add_attr(Attribute::Italic(true))
|
||||||
.build();
|
.build();
|
||||||
let retain = OpBuilder::insert("123").attributes(attributes).build();
|
let retain = OpBuilder::insert("123").attributes(attributes).build();
|
||||||
|
|
||||||
|
@@ -23,7 +23,7 @@ impl WorkspaceDepsResolver {
 
 pub fn split_into(self) -> (Arc<dyn WorkspaceUser>, Arc<dyn WorkspaceDatabase>) {
 let user: Arc<dyn WorkspaceUser> = self.inner.clone();
-let database: Arc<dyn WorkspaceDatabase> = self.inner.clone();
+let database: Arc<dyn WorkspaceDatabase> = self.inner;
 (user, database)
 }
 }
@@ -43,7 +43,7 @@ impl FlowySDKConfig {
 }
 
 fn crate_log_filter(level: Option<String>) -> String {
-let level = level.unwrap_or(std::env::var("RUST_LOG").unwrap_or("info".to_owned()));
+let level = level.unwrap_or_else(|| std::env::var("RUST_LOG").unwrap_or_else(|_| "info".to_owned()));
 let mut filters = vec![];
 filters.push(format!("flowy_sdk={}", level));
 filters.push(format!("flowy_workspace={}", level));
|
|||||||
workspace_controller: Arc<WorkspaceController>,
|
workspace_controller: Arc<WorkspaceController>,
|
||||||
) {
|
) {
|
||||||
loop {
|
loop {
|
||||||
match subscribe.recv().await {
|
if let Ok(status) = subscribe.recv().await {
|
||||||
Ok(status) => {
|
let result = || async {
|
||||||
let result = || async {
|
match status {
|
||||||
match status {
|
UserStatus::Login { token } => {
|
||||||
UserStatus::Login { token } => {
|
let _ = workspace_controller.user_did_sign_in(&token).await?;
|
||||||
let _ = workspace_controller.user_did_sign_in(&token).await?;
|
},
|
||||||
},
|
UserStatus::Logout { .. } => {
|
||||||
UserStatus::Logout { .. } => {
|
workspace_controller.user_did_logout().await;
|
||||||
workspace_controller.user_did_logout().await;
|
},
|
||||||
},
|
UserStatus::Expired { .. } => {
|
||||||
UserStatus::Expired { .. } => {
|
workspace_controller.user_session_expired().await;
|
||||||
workspace_controller.user_session_expired().await;
|
},
|
||||||
},
|
UserStatus::SignUp { profile, ret } => {
|
||||||
UserStatus::SignUp { profile, ret } => {
|
let _ = workspace_controller.user_did_sign_up(&profile.token).await?;
|
||||||
let _ = workspace_controller.user_did_sign_up(&profile.token).await?;
|
let _ = ret.send(());
|
||||||
let _ = ret.send(());
|
},
|
||||||
},
|
|
||||||
}
|
|
||||||
Ok::<(), WorkspaceError>(())
|
|
||||||
};
|
|
||||||
|
|
||||||
match result().await {
|
|
||||||
Ok(_) => {},
|
|
||||||
Err(e) => log::error!("{}", e),
|
|
||||||
}
|
}
|
||||||
},
|
Ok::<(), WorkspaceError>(())
|
||||||
Err(_) => {},
|
};
|
||||||
|
|
||||||
|
match result().await {
|
||||||
|
Ok(_) => {},
|
||||||
|
Err(e) => log::error!("{}", e),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -163,8 +160,7 @@ fn mk_workspace(
 flowy_document: Arc<FlowyDocument>,
 server_config: &ServerConfig,
 ) -> Arc<WorkspaceController> {
-let workspace_deps = WorkspaceDepsResolver::new(user_session.clone());
+let workspace_deps = WorkspaceDepsResolver::new(user_session);
 let (user, database) = workspace_deps.split_into();
-let workspace_controller = flowy_workspace::module::mk_workspace(user, database, flowy_document, server_config);
-workspace_controller
+flowy_workspace::module::mk_workspace(user, database, flowy_document, server_config)
 }
@@ -10,15 +10,14 @@ pub fn mk_modules(workspace_controller: Arc<WorkspaceController>, user_session:
 vec![mk_user_module(user_session), mk_workspace_module(workspace_controller)]
 }
 
-fn mk_user_module(user_session: Arc<UserSession>) -> Module { flowy_user::module::create(user_session.clone()) }
+fn mk_user_module(user_session: Arc<UserSession>) -> Module { flowy_user::module::create(user_session) }
 
 fn mk_workspace_module(workspace_controller: Arc<WorkspaceController>) -> Module {
 flowy_workspace::module::create(workspace_controller)
 }
 
 pub fn mk_document_module(user_session: Arc<UserSession>, server_config: &ServerConfig) -> Arc<FlowyDocument> {
-let document_deps = DocumentDepsResolver::new(user_session.clone());
+let document_deps = DocumentDepsResolver::new(user_session);
 let (user, ws_manager) = document_deps.split_into();
-let document = Arc::new(FlowyDocument::new(user, ws_manager, server_config));
-document
+Arc::new(FlowyDocument::new(user, ws_manager, server_config))
 }
@@ -16,7 +16,7 @@ use std::{fs, path::PathBuf, sync::Arc};
 
 pub fn root_dir() -> String {
 // https://doc.rust-lang.org/cargo/reference/environment-variables.html
-let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or("./".to_owned());
+let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| "./".to_owned());
 let mut path_buf = fs::canonicalize(&PathBuf::from(&manifest_dir)).unwrap();
 path_buf.pop(); // rust-lib
 path_buf.push("flowy-test");
|
|||||||
|
|
||||||
pub fn login_password() -> String { "HelloWorld!123".to_string() }
|
pub fn login_password() -> String { "HelloWorld!123".to_string() }
|
||||||
|
|
||||||
const DEFAULT_WORKSPACE_NAME: &'static str = "My workspace";
|
const DEFAULT_WORKSPACE_NAME: &str = "My workspace";
|
||||||
const DEFAULT_WORKSPACE_DESC: &'static str = "This is your first workspace";
|
const DEFAULT_WORKSPACE_DESC: &str = "This is your first workspace";
|
||||||
const DEFAULT_WORKSPACE: &'static str = "Default_Workspace";
|
const DEFAULT_WORKSPACE: &str = "Default_Workspace";
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
pub(crate) fn create_default_workspace_if_need(dispatch: Arc<EventDispatch>, user_id: &str) -> Result<(), UserError> {
|
pub(crate) fn create_default_workspace_if_need(dispatch: Arc<EventDispatch>, user_id: &str) -> Result<(), UserError> {
|
||||||
@ -62,13 +62,13 @@ pub(crate) fn create_default_workspace_if_need(dispatch: Arc<EventDispatch>, use
|
|||||||
|
|
||||||
let workspace = result.map_err(|e| UserError::internal().context(e))?;
|
let workspace = result.map_err(|e| UserError::internal().context(e))?;
|
||||||
let query: Bytes = QueryWorkspaceRequest {
|
let query: Bytes = QueryWorkspaceRequest {
|
||||||
workspace_id: Some(workspace.id.clone()),
|
workspace_id: Some(workspace.id),
|
||||||
}
|
}
|
||||||
.into_bytes()
|
.into_bytes()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let request = ModuleRequest::new(OpenWorkspace).payload(query);
|
let request = ModuleRequest::new(OpenWorkspace).payload(query);
|
||||||
let _result = EventDispatch::sync_send(dispatch.clone(), request)
|
let _result = EventDispatch::sync_send(dispatch, request)
|
||||||
.parse::<Workspace, WorkspaceError>()
|
.parse::<Workspace, WorkspaceError>()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
@ -92,7 +92,7 @@ pub fn sign_up(dispatch: Arc<EventDispatch>) -> SignUpContext {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let request = ModuleRequest::new(SignUp).payload(payload);
|
let request = ModuleRequest::new(SignUp).payload(payload);
|
||||||
let user_profile = EventDispatch::sync_send(dispatch.clone(), request)
|
let user_profile = EventDispatch::sync_send(dispatch, request)
|
||||||
.parse::<UserProfile, UserError>()
|
.parse::<UserProfile, UserError>()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
@ -132,12 +132,10 @@ fn sign_in(dispatch: Arc<EventDispatch>) -> UserProfile {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let request = ModuleRequest::new(SignIn).payload(payload);
|
let request = ModuleRequest::new(SignIn).payload(payload);
|
||||||
let user_profile = EventDispatch::sync_send(dispatch, request)
|
EventDispatch::sync_send(dispatch, request)
|
||||||
.parse::<UserProfile, UserError>()
|
.parse::<UserProfile, UserError>()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.unwrap();
|
.unwrap()
|
||||||
|
|
||||||
user_profile
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
|
@@ -39,7 +39,7 @@ impl FlowyTest {
 }
 
 pub fn setup_with(server_config: ServerConfig) -> Self {
-let config = FlowySDKConfig::new(&root_dir(), server_config, &uuid().to_string()).log_filter("debug");
+let config = FlowySDKConfig::new(&root_dir(), server_config, &uuid()).log_filter("debug");
 let sdk = FlowySDK::new(config);
 Self { sdk }
 }
@@ -133,7 +133,7 @@ async fn open_workspace(sdk: &FlowyTestSDK, workspace_id: &str) {
 }
 
 pub async fn read_workspace(sdk: &FlowyTestSDK, request: QueryWorkspaceRequest) -> Vec<Workspace> {
-let mut repeated_workspace = FlowyWorkspaceTest::new(sdk.clone())
+let repeated_workspace = FlowyWorkspaceTest::new(sdk.clone())
 .event(ReadWorkspaces)
 .request(request.clone())
 .async_send()
@@ -2,7 +2,7 @@ use flowy_derive::ProtoBuf_Enum;
 
 use dart_notify::DartNotifyBuilder;
 
-const OBSERVABLE_CATEGORY: &'static str = "User";
+const OBSERVABLE_CATEGORY: &str = "User";
 
 #[derive(ProtoBuf_Enum, Debug)]
 pub(crate) enum UserNotification {
@@ -16,8 +16,8 @@ impl std::default::Default for UserNotification {
 fn default() -> Self { UserNotification::Unknown }
 }
 
-impl std::convert::Into<i32> for UserNotification {
-fn into(self) -> i32 { self as i32 }
+impl std::convert::From<UserNotification> for i32 {
+fn from(notification: UserNotification) -> Self { notification as i32 }
 }
 
 pub(crate) fn dart_notify(id: &str, ty: UserNotification) -> DartNotifyBuilder {
@@ -5,8 +5,12 @@ pub struct UserSessionBuilder {
 config: Option<UserSessionConfig>,
 }
 
+impl std::default::Default for UserSessionBuilder {
+fn default() -> Self { Self { config: None } }
+}
+
 impl UserSessionBuilder {
-pub fn new() -> Self { Self { config: None } }
+pub fn new() -> Self { UserSessionBuilder::default() }
 
 pub fn root_dir(mut self, dir: &str, server_config: &ServerConfig, session_cache_key: &str) -> Self {
 self.config = Some(UserSessionConfig::new(dir, server_config, session_cache_key));
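For context: clippy's `new_without_default` lint asks that a public, argument-free `new()` be paired with a `Default` implementation; here the `Default` impl is added and `new()` delegates to it. A minimal sketch with a hypothetical builder type:

pub struct SessionBuilder { config: Option<String> }

impl Default for SessionBuilder {
    fn default() -> Self { Self { config: None } }
}

impl SessionBuilder {
    // new() keeps the public API while reusing the Default impl.
    pub fn new() -> Self { SessionBuilder::default() }
}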
@@ -33,7 +33,7 @@ impl UserDB {
 })?;
 
 match DB_MAP.try_write_for(Duration::from_millis(300)) {
-None => Err(UserError::internal().context(format!("Acquire write lock to save user db failed"))),
+None => Err(UserError::internal().context("Acquire write lock to save user db failed")),
 Some(mut write_guard) => {
 write_guard.insert(user_id.to_owned(), db);
 Ok(())
|
|||||||
|
|
||||||
pub(crate) fn close_user_db(&self, user_id: &str) -> Result<(), UserError> {
|
pub(crate) fn close_user_db(&self, user_id: &str) -> Result<(), UserError> {
|
||||||
match DB_MAP.try_write_for(Duration::from_millis(300)) {
|
match DB_MAP.try_write_for(Duration::from_millis(300)) {
|
||||||
None => Err(UserError::internal().context(format!("Acquire write lock to close user db failed"))),
|
None => Err(UserError::internal().context("Acquire write lock to close user db failed")),
|
||||||
Some(mut write_guard) => {
|
Some(mut write_guard) => {
|
||||||
set_user_db_init(false, user_id);
|
set_user_db_init(false, user_id);
|
||||||
write_guard.remove(user_id);
|
write_guard.remove(user_id);
|
||||||
@@ -71,7 +71,7 @@ impl UserDB {
 }
 
 match DB_MAP.try_read_for(Duration::from_millis(300)) {
-None => Err(UserError::internal().context(format!("Acquire read lock to read user db failed"))),
+None => Err(UserError::internal().context("Acquire read lock to read user db failed")),
 Some(read_guard) => match read_guard.get(user_id) {
 None => Err(UserError::internal().context("Get connection failed. The database is not initialization")),
 Some(database) => Ok(database.get_pool()),
|
|||||||
fn is_user_db_init(user_id: &str) -> bool {
|
fn is_user_db_init(user_id: &str) -> bool {
|
||||||
match INIT_RECORD.lock().get(user_id) {
|
match INIT_RECORD.lock().get(user_id) {
|
||||||
None => false,
|
None => false,
|
||||||
Some(flag) => flag.clone(),
|
Some(flag) => *flag,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -74,23 +74,19 @@ impl UserSession {
 let server = construct_user_server(&config.server_config);
 let ws_controller = Arc::new(WsController::new());
 let (status_notifier, _) = broadcast::channel(10);
-let user_session = Self {
+Self {
 database: db,
 config,
 server,
 session: RwLock::new(None),
 ws_controller,
 status_notifier,
-};
-user_session
+}
 }
 
 pub fn init(&self) {
-match self.get_session() {
-Ok(session) => {
-let _ = self.status_notifier.send(UserStatus::Login { token: session.token });
-},
-Err(_) => {},
+if let Ok(session) = self.get_session() {
+let _ = self.status_notifier.send(UserStatus::Login { token: session.token });
 }
 }
 
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
impl std::convert::From<Session> for String {
|
||||||
impl std::convert::Into<String> for Session {
|
fn from(session: Session) -> Self {
|
||||||
fn into(self) -> String {
|
match serde_json::to_string(&session) {
|
||||||
match serde_json::to_string(&self) {
|
|
||||||
Ok(s) => s,
|
Ok(s) => s,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
log::error!("Serialize session to string failed: {:?}", e);
|
log::error!("Serialize session to string failed: {:?}", e);
|
||||||
|
@@ -37,13 +37,13 @@ impl std::convert::From<SignInResponse> for UserTable {
 fn from(resp: SignInResponse) -> Self { UserTable::new(resp.user_id, resp.name, resp.email, resp.token) }
 }
 
-impl std::convert::Into<UserProfile> for UserTable {
-fn into(self) -> UserProfile {
+impl std::convert::From<UserTable> for UserProfile {
+fn from(table: UserTable) -> Self {
 UserProfile {
-id: self.id,
-email: self.email,
-name: self.name,
-token: self.token,
+id: table.id,
+email: table.email,
+name: table.name,
+token: table.token,
 }
 }
 }
@@ -114,6 +114,6 @@ pub(crate) async fn export_handler(
 controller: Unit<Arc<ViewController>>,
 ) -> DataResult<ExportData, WorkspaceError> {
 let params: ExportParams = data.into_inner().try_into()?;
-let data = controller.export_doc(params.into()).await?;
+let data = controller.export_doc(params).await?;
 data_result(data)
 }
@@ -53,15 +53,14 @@ pub fn mk_workspace(
 server.clone(),
 ));
 
-let workspace_controller = Arc::new(WorkspaceController::new(
-user.clone(),
-database.clone(),
-app_controller.clone(),
-view_controller.clone(),
-trash_can.clone(),
-server.clone(),
-));
-workspace_controller
+Arc::new(WorkspaceController::new(
+user,
+database,
+app_controller,
+view_controller,
+trash_can,
+server,
+))
 }
 
 pub fn create(workspace: Arc<WorkspaceController>) -> Module {
@@ -1,6 +1,6 @@
 use dart_notify::DartNotifyBuilder;
 use flowy_derive::ProtoBuf_Enum;
-const OBSERVABLE_CATEGORY: &'static str = "Workspace";
+const OBSERVABLE_CATEGORY: &str = "Workspace";
 
 // Opti: Using the Rust macro to generate the serde code automatically that can
 // be use directly in flutter
|
|||||||
fn default() -> Self { WorkspaceNotification::Unknown }
|
fn default() -> Self { WorkspaceNotification::Unknown }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::convert::Into<i32> for WorkspaceNotification {
|
impl std::convert::From<WorkspaceNotification> for i32 {
|
||||||
fn into(self) -> i32 { self as i32 }
|
fn from(notification: WorkspaceNotification) -> Self { notification as i32 }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug")]
|
#[tracing::instrument(level = "debug")]
|
||||||
|
@@ -61,7 +61,7 @@ impl AppController {
 }
 
 pub(crate) fn save_app(&self, app: App, conn: &SqliteConnection) -> Result<(), WorkspaceError> {
-let app_table = AppTable::new(app.clone());
+let app_table = AppTable::new(app);
 let _ = AppTableSql::create_app(app_table, &*conn)?;
 Ok(())
 }
@@ -176,9 +176,8 @@ impl AppController {
 Err(_e) => None,
 }
 }));
-match stream.next().await {
-Some(event) => handle_trash_event(database.clone(), trash_can.clone(), event).await,
-None => {},
+if let Some(event) = stream.next().await {
+handle_trash_event(database.clone(), trash_can.clone(), event).await
 }
 }
 });
@@ -25,16 +25,11 @@ impl WorkspaceServerAPI for WorkspaceServer {
 fn init(&self) {
 let mut rx = BACKEND_API_MIDDLEWARE.invalid_token_subscribe();
 tokio::spawn(async move {
-loop {
-match rx.recv().await {
-Ok(invalid_token) => {
+while let Ok(invalid_token) = rx.recv().await {
 let error = WorkspaceError::new(ErrorCode::UserUnauthorized, "");
 send_dart_notification(&invalid_token, WorkspaceNotification::UserUnauthorized)
 .error(error)
 .send()
-},
-Err(_) => {},
-}
 }
 });
 }
@@ -283,11 +283,9 @@ impl ViewController {
 Err(_e) => None,
 }
 }));
-match stream.next().await {
-Some(event) => {
+if let Some(event) = stream.next().await {
 handle_trash_event(database.clone(), document.clone(), trash_can.clone(), event).await
-},
-None => {},
 }
 }
 });
@@ -48,9 +48,9 @@ impl WorkspaceController {
 Self {
 user,
 workspace_sql,
+view_controller,
 database,
 app_controller,
-view_controller,
 trash_can,
 server,
 }
@ -211,7 +211,7 @@ impl WorkspaceController {
|
|||||||
pub(crate) async fn open_workspace(&self, params: WorkspaceIdentifier) -> Result<Workspace, WorkspaceError> {
|
pub(crate) async fn open_workspace(&self, params: WorkspaceIdentifier) -> Result<Workspace, WorkspaceError> {
|
||||||
let user_id = self.user.user_id()?;
|
let user_id = self.user.user_id()?;
|
||||||
let conn = self.database.db_connection()?;
|
let conn = self.database.db_connection()?;
|
||||||
if let Some(workspace_id) = params.workspace_id.clone() {
|
if let Some(workspace_id) = params.workspace_id {
|
||||||
let workspace = self.read_local_workspace(workspace_id, &user_id, &*conn)?;
|
let workspace = self.read_local_workspace(workspace_id, &user_id, &*conn)?;
|
||||||
set_current_workspace(&workspace.id);
|
set_current_workspace(&workspace.id);
|
||||||
Ok(workspace)
|
Ok(workspace)
|
||||||
@ -227,7 +227,7 @@ impl WorkspaceController {
|
|||||||
let user_id = self.user.user_id()?;
|
let user_id = self.user.user_id()?;
|
||||||
let workspaces =
|
let workspaces =
|
||||||
self.read_local_workspaces(params.workspace_id.clone(), &user_id, &*self.database.db_connection()?)?;
|
self.read_local_workspaces(params.workspace_id.clone(), &user_id, &*self.database.db_connection()?)?;
|
||||||
let _ = self.read_workspaces_on_server(user_id.clone(), params.clone());
|
let _ = self.read_workspaces_on_server(user_id, params);
|
||||||
Ok(workspaces)
|
Ok(workspaces)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -239,13 +239,9 @@ impl WorkspaceController {
|
|||||||
};
|
};
|
||||||
let workspace = self.read_local_workspace(workspace_id, &user_id, &*self.database.db_connection()?)?;
|
let workspace = self.read_local_workspace(workspace_id, &user_id, &*self.database.db_connection()?)?;
|
||||||
|
|
||||||
let mut latest_view: Option<View> = None;
|
let latest_view: Option<View> = self.view_controller.latest_visit_view().unwrap_or(None);
|
||||||
match self.view_controller.latest_visit_view() {
|
|
||||||
Ok(view) => latest_view = view,
|
|
||||||
Err(_) => {},
|
|
||||||
}
|
|
||||||
let setting = CurrentWorkspaceSetting { workspace, latest_view };
|
let setting = CurrentWorkspaceSetting { workspace, latest_view };
|
||||||
let _ = self.read_workspaces_on_server(user_id.clone(), params)?;
|
let _ = self.read_workspaces_on_server(user_id, params)?;
|
||||||
Ok(setting)
|
Ok(setting)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -361,14 +357,14 @@ impl WorkspaceController {
|
|||||||
let _ = (&*conn).immediate_transaction::<_, WorkspaceError, _>(|| {
|
let _ = (&*conn).immediate_transaction::<_, WorkspaceError, _>(|| {
|
||||||
tracing::debug!("Save {} workspace", workspaces.len());
|
tracing::debug!("Save {} workspace", workspaces.len());
|
||||||
for workspace in &workspaces.items {
|
for workspace in &workspaces.items {
|
||||||
let mut m_workspace = workspace.clone();
|
let m_workspace = workspace.clone();
|
||||||
let apps = m_workspace.apps.into_inner();
|
let apps = m_workspace.apps.clone().into_inner();
|
||||||
let workspace_table = WorkspaceTable::new(m_workspace, &user_id);
|
let workspace_table = WorkspaceTable::new(m_workspace, &user_id);
|
||||||
|
|
||||||
let _ = workspace_sql.create_workspace(workspace_table, &*conn)?;
|
let _ = workspace_sql.create_workspace(workspace_table, &*conn)?;
|
||||||
tracing::debug!("Save {} apps", apps.len());
|
tracing::debug!("Save {} apps", apps.len());
|
||||||
for mut app in apps {
|
for app in apps {
|
||||||
let views = app.belongings.into_inner();
|
let views = app.belongings.clone().into_inner();
|
||||||
match app_ctrl.save_app(app, &*conn) {
|
match app_ctrl.save_app(app, &*conn) {
|
||||||
Ok(_) => {},
|
Ok(_) => {},
|
||||||
Err(e) => log::error!("create app failed: {:?}", e),
|
Err(e) => log::error!("create app failed: {:?}", e),
|
||||||
|
@@ -47,13 +47,13 @@ impl AppTable {
 }
 }
 
-impl std::convert::Into<Trash> for AppTable {
-fn into(self) -> Trash {
+impl std::convert::From<AppTable> for Trash {
+fn from(table: AppTable) -> Self {
 Trash {
-id: self.id,
-name: self.name,
-modified_time: self.modified_time,
-create_time: self.create_time,
+id: table.id,
+name: table.name,
+modified_time: table.modified_time,
+create_time: table.create_time,
 ty: TrashType::App,
 }
 }
@@ -117,18 +117,17 @@ impl AppTableChangeset {
 }
 }
 }
-
-impl std::convert::Into<App> for AppTable {
-fn into(self) -> App {
+impl std::convert::From<AppTable> for App {
+fn from(table: AppTable) -> Self {
 App {
-id: self.id,
-workspace_id: self.workspace_id,
-name: self.name,
-desc: self.desc,
+id: table.id,
+workspace_id: table.workspace_id,
+name: table.name,
+desc: table.desc,
 belongings: RepeatedView::default(),
-version: self.version,
-modified_time: self.modified_time,
-create_time: self.create_time,
+version: table.version,
+modified_time: table.modified_time,
+create_time: table.create_time,
 }
 }
 }
@@ -12,14 +12,14 @@ pub(crate) struct TrashTable {
 pub create_time: i64,
 pub ty: SqlTrashType,
 }
-impl std::convert::Into<Trash> for TrashTable {
-fn into(self) -> Trash {
+impl std::convert::From<TrashTable> for Trash {
+fn from(table: TrashTable) -> Self {
 Trash {
-id: self.id,
-name: self.name,
-modified_time: self.modified_time,
-create_time: self.create_time,
-ty: self.ty.into(),
+id: table.id,
+name: table.name,
+modified_time: table.modified_time,
+create_time: table.create_time,
+ty: table.ty.into(),
 }
 }
 }
@@ -77,9 +77,9 @@ impl std::convert::From<i32> for SqlTrashType {
 
 impl_sql_integer_expression!(SqlTrashType);
 
-impl std::convert::Into<TrashType> for SqlTrashType {
-fn into(self) -> TrashType {
-match self {
+impl std::convert::From<SqlTrashType> for TrashType {
+fn from(ty: SqlTrashType) -> Self {
+match ty {
 SqlTrashType::Unknown => TrashType::Unknown,
 SqlTrashType::View => TrashType::View,
 SqlTrashType::App => TrashType::App,
@@ -50,33 +50,33 @@ impl ViewTable {
 }
 }
 
-impl std::convert::Into<View> for ViewTable {
-fn into(self) -> View {
-let view_type = match self.view_type {
+impl std::convert::From<ViewTable> for View {
+fn from(table: ViewTable) -> Self {
+let view_type = match table.view_type {
 ViewTableType::Docs => ViewType::Doc,
 };
 
 View {
-id: self.id,
-belong_to_id: self.belong_to_id,
-name: self.name,
-desc: self.desc,
+id: table.id,
+belong_to_id: table.belong_to_id,
+name: table.name,
+desc: table.desc,
 view_type,
 belongings: RepeatedView::default(),
-modified_time: self.modified_time,
-version: self.version,
-create_time: self.create_time,
+modified_time: table.modified_time,
+version: table.version,
+create_time: table.create_time,
 }
 }
 }
 
-impl std::convert::Into<Trash> for ViewTable {
-fn into(self) -> Trash {
+impl std::convert::From<ViewTable> for Trash {
+fn from(table: ViewTable) -> Self {
 Trash {
-id: self.id,
-name: self.name,
-modified_time: self.modified_time,
-create_time: self.create_time,
+id: table.id,
+name: table.name,
+modified_time: table.modified_time,
+create_time: table.create_time,
 ty: TrashType::View,
 }
 }
@@ -40,7 +40,7 @@ impl WorkspaceTableSql {
 .into_boxed();
 
 if let Some(workspace_id) = workspace_id {
-filter = filter.filter(workspace_table::id.eq(workspace_id.to_owned()));
+filter = filter.filter(workspace_table::id.eq(workspace_id));
 };
 
 let workspaces = filter.load::<WorkspaceTable>(conn)?;
@@ -31,15 +31,15 @@ impl WorkspaceTable {
 }
 }
 
-impl std::convert::Into<Workspace> for WorkspaceTable {
-fn into(self) -> Workspace {
+impl std::convert::From<WorkspaceTable> for Workspace {
+fn from(table: WorkspaceTable) -> Self {
 Workspace {
-id: self.id,
-name: self.name,
-desc: self.desc,
+id: table.id,
+name: table.name,
+desc: table.desc,
 apps: RepeatedApp::default(),
-modified_time: self.modified_time,
-create_time: self.create_time,
+modified_time: table.modified_time,
+create_time: table.create_time,
 }
 }
 }
@@ -1,3 +1,4 @@
+#![allow(clippy::type_complexity)]
 use crate::{module::WorkspaceUser, services::server::Server};
 use lib_infra::retry::Action;
 use pin_project::pin_project;
@@ -27,7 +28,7 @@ impl<Fut, T, E> RetryAction<Fut, T, E> {
 Fut: Future<Output = Result<T, E>> + Send + Sync + 'static,
 F: Fn(String, Server) -> Fut + Send + Sync + 'static,
 {
-let token = user.token().unwrap_or("".to_owned());
+let token = user.token().unwrap_or_else(|_| "".to_owned());
 Self {
 token,
 server,
@@ -53,7 +53,7 @@ where
 T: std::convert::TryFrom<Bytes, Error = protobuf::ProtobufError>,
 {
 fn parse_from_bytes(bytes: Bytes) -> Result<Self, DispatchError> {
-let data = T::try_from(bytes.clone())?;
+let data = T::try_from(bytes)?;
 Ok(data)
 }
 }
@@ -76,7 +76,7 @@ where
 T: FromBytes,
 {
 match payload {
-Payload::None => Err(InternalError::UnexpectedNone(format!("Parse fail, expected payload")).into()),
+Payload::None => Err(InternalError::UnexpectedNone("Parse fail, expected payload".to_string()).into()),
 Payload::Bytes(bytes) => {
 let data = T::parse_from_bytes(bytes.clone())?;
 Ok(Data(data))
@@ -26,8 +26,7 @@ impl EventDispatch {
 tracing::trace!("{}", module_info(&modules));
 let module_map = as_module_map(modules);
 
-let dispatch = EventDispatch { module_map, runtime };
-dispatch
+EventDispatch { module_map, runtime }
 }
 
 pub fn async_send<Req>(dispatch: Arc<EventDispatch>, request: Req) -> DispatchFuture<EventResponse>
|
|||||||
|
|
||||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||||
let this = self.as_mut().project();
|
let this = self.as_mut().project();
|
||||||
loop {
|
Poll::Ready(futures_core::ready!(this.fut.poll(cx)))
|
||||||
return Poll::Ready(futures_core::ready!(this.fut.poll(cx)));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -168,7 +165,7 @@ impl Service<DispatchContext> for DispatchService {
 }
 
 #[allow(dead_code)]
-fn module_info(modules: &Vec<Module>) -> String {
+fn module_info(modules: &[Module]) -> String {
 let mut info = format!("{} modules loaded\n", modules.len());
 for module in modules {
 info.push_str(&format!("-> {} loaded \n", module.name));
@@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 mod errors;
 
 pub use errors::*;
@@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 pub use container::*;
 pub use data::*;
 pub use module::*;
@@ -59,14 +59,18 @@ pub struct Module {
 service_map: Arc<HashMap<Event, EventServiceFactory>>,
 }
 
-impl Module {
-pub fn new() -> Self {
+impl std::default::Default for Module {
+fn default() -> Self {
 Self {
 name: "".to_owned(),
 module_data: Arc::new(ModuleDataMap::new()),
 service_map: Arc::new(HashMap::new()),
 }
 }
+}
+
+impl Module {
+pub fn new() -> Self { Module::default() }
 
 pub fn name(mut self, s: &str) -> Self {
 self.name = s.to_owned();
|
|||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn events(&self) -> Vec<Event> { self.service_map.keys().map(|key| key.clone()).collect::<Vec<_>>() }
|
pub fn events(&self) -> Vec<Event> { self.service_map.keys().cloned().collect::<Vec<_>>() }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@ -168,7 +172,7 @@ impl Service<ModuleRequest> for ModuleService {
|
|||||||
fn call(&self, request: ModuleRequest) -> Self::Future {
|
fn call(&self, request: ModuleRequest) -> Self::Future {
|
||||||
let ModuleRequest { id, event, payload } = request;
|
let ModuleRequest { id, event, payload } = request;
|
||||||
let module_data = self.module_data.clone();
|
let module_data = self.module_data.clone();
|
||||||
let request = EventRequest::new(id.clone(), event, module_data);
|
let request = EventRequest::new(id, event, module_data);
|
||||||
|
|
||||||
match self.service_map.get(&request.event) {
|
match self.service_map.get(&request.event) {
|
||||||
Some(factory) => {
|
Some(factory) => {
|
||||||
@ -200,10 +204,8 @@ impl Future for ModuleServiceFuture {
|
|||||||
type Output = Result<EventResponse, DispatchError>;
|
type Output = Result<EventResponse, DispatchError>;
|
||||||
|
|
||||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||||
loop {
|
let (_, response) = ready!(self.as_mut().project().fut.poll(cx))?.into_parts();
|
||||||
let (_, response) = ready!(self.as_mut().project().fut.poll(cx))?.into_parts();
|
Poll::Ready(Ok(response))
|
||||||
return Poll::Ready(Ok(response));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
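The `loop { ... return ... }` bodies removed here and in the runtime futures below only ever ran once, which is what clippy's never_loop lint flags; the fix is to return the tail expression directly. A compact before/after sketch on a plain function (no futures involved, to keep it self-contained):

// Before: the loop can never iterate, so clippy::never_loop fires.
fn first_char_looped(s: &str) -> Option<char> {
    loop {
        return s.chars().next();
    }
}

// After: the expression is the function body.
fn first_char(s: &str) -> Option<char> {
    s.chars().next()
}

fn main() {
    assert_eq!(first_char_looped("abc"), first_char("abc"));
}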
@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 pub mod payload;
 mod request;
 
@ -25,26 +25,25 @@ fn format_payload_print(payload: &Payload, f: &mut Formatter<'_>) -> fmt::Result
     }
 }
 
-impl std::convert::Into<Payload> for String {
-    fn into(self) -> Payload { Payload::Bytes(Bytes::from(self)) }
+impl std::convert::From<String> for Payload {
+    fn from(s: String) -> Self { Payload::Bytes(Bytes::from(s)) }
 }
 
-impl std::convert::Into<Payload> for &'_ String {
-    fn into(self) -> Payload { Payload::Bytes(Bytes::from(self.to_owned())) }
+impl std::convert::From<&'_ String> for Payload {
+    fn from(s: &String) -> Self { Payload::Bytes(Bytes::from(s.to_owned())) }
 }
 
-impl std::convert::Into<Payload> for Bytes {
-    fn into(self) -> Payload { Payload::Bytes(self) }
+impl std::convert::From<Bytes> for Payload {
+    fn from(bytes: Bytes) -> Self { Payload::Bytes(bytes) }
 }
 
-impl std::convert::Into<Payload> for () {
-    fn into(self) -> Payload { Payload::None }
+impl std::convert::From<()> for Payload {
+    fn from(_: ()) -> Self { Payload::None }
+}
+
+impl std::convert::From<Vec<u8>> for Payload {
+    fn from(bytes: Vec<u8>) -> Self { Payload::Bytes(Bytes::from(bytes)) }
 }
 
-impl std::convert::Into<Payload> for Vec<u8> {
-    fn into(self) -> Payload { Payload::Bytes(Bytes::from(self)) }
-}
-
-impl std::convert::Into<Payload> for &str {
-    fn into(self) -> Payload { self.to_string().into() }
+impl std::convert::From<&str> for Payload {
+    fn from(s: &str) -> Self { s.to_string().into() }
 }
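Swapping `impl Into<Payload> for T` to `impl From<T> for Payload` is the from_over_into fix: the standard library's blanket impl derives `Into` from `From`, so existing `.into()` call sites keep working while `Payload::from` becomes available too. A minimal sketch with a hypothetical two-variant enum standing in for the crate's `Payload`:

// Illustrative stand-in; the real Payload wraps bytes::Bytes.
#[derive(Debug, PartialEq)]
enum Payload {
    None,
    Text(String),
}

impl From<String> for Payload {
    fn from(s: String) -> Self { Payload::Text(s) }
}

impl From<()> for Payload {
    fn from(_: ()) -> Self { Payload::None }
}

fn main() {
    // From gives Into for free via the std blanket impl.
    let a: Payload = "hello".to_string().into();
    let b = Payload::from(());
    assert_eq!(a, Payload::Text("hello".to_string()));
    assert_eq!(b, Payload::None);
}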
@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 pub use builder::*;
 pub use responder::*;
 pub use response::*;
@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 mod boxed;
 mod handler;
 mod service;
@ -40,12 +40,11 @@ impl FlowySystem {
         });
 
         let module_map = as_module_map(module_factory());
-        sender_factory(module_map.clone(), &runtime);
+        sender_factory(module_map, &runtime);
 
         let system = Self { sys_cmd_tx };
         FlowySystem::set_current(system);
-        let runner = SystemRunner { rt: runtime, stop_rx };
-        runner
+        SystemRunner { rt: runtime, stop_rx }
     }
 
     #[allow(dead_code)]
@ -1,5 +1,5 @@
 use std::{io, thread};
-use thread_id;
 use tokio::runtime;
 
 pub mod ready;
@ -28,9 +28,7 @@ where
 
     fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
         let this = self.as_mut().project();
-        loop {
-            return Poll::Ready(ready!(this.fut.poll(cx)));
-        }
+        Poll::Ready(ready!(this.fut.poll(cx)))
     }
 }
 
@ -60,9 +58,7 @@ where
 
     fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
         let this = self.as_mut().project();
-        loop {
-            let result = ready!(this.fut.poll(cx));
-            return Poll::Ready(result);
-        }
+        let result = ready!(this.fut.poll(cx));
+        Poll::Ready(result)
     }
 }
@ -1,3 +1,5 @@
+#![allow(clippy::module_inception)]
+
 mod kv;
 mod schema;
 
@ -1,3 +1,5 @@
+#![allow(clippy::large_enum_variant)]
+#![allow(clippy::type_complexity)]
 use crate::retry::FixedInterval;
 use pin_project::pin_project;
 use std::{
@ -33,7 +33,7 @@ pub trait PragmaExtension: ConnectionExtension {
             None => format!("PRAGMA {} = '{}'", key, val),
         };
         log::trace!("SQLITE {}", query);
-        Ok(self.query::<ST, T>(&query)?)
+        self.query::<ST, T>(&query)
     }
 
     fn pragma_get<ST, T>(&self, key: &str, schema: Option<&str>) -> Result<T>
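Wrapping a fallible call as `Ok(self.query(...)?)` first propagates the error and then re-wraps the success value, which clippy flags as a needless question mark; returning the `Result` itself is equivalent when the error types already match, as they appear to here. A tiny sketch under that assumption:

use std::num::ParseIntError;

// Before: unwrap with `?` only to re-wrap in Ok.
fn parse_rewrapped(s: &str) -> Result<i32, ParseIntError> {
    Ok(s.parse::<i32>()?)
}

// After: pass the Result straight through.
fn parse_direct(s: &str) -> Result<i32, ParseIntError> {
    s.parse::<i32>()
}

fn main() {
    assert_eq!(parse_rewrapped("42"), parse_direct("42"));
}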
@ -45,7 +45,7 @@ pub trait PragmaExtension: ConnectionExtension {
             None => format!("PRAGMA {}", key),
         };
         log::trace!("SQLITE {}", query);
-        Ok(self.query::<ST, T>(&query)?)
+        self.query::<ST, T>(&query)
     }
 
     fn pragma_set_busy_timeout(&self, timeout_ms: i32) -> Result<i32> {
@ -59,7 +59,7 @@ pub trait PragmaExtension: ConnectionExtension {
     }
 
     fn pragma_get_journal_mode(&self, schema: Option<&str>) -> Result<SQLiteJournalMode> {
-        Ok(self.pragma_get::<Text, String>("journal_mode", schema)?.parse()?)
+        self.pragma_get::<Text, String>("journal_mode", schema)?.parse()
     }
 
     fn pragma_set_synchronous(&self, synchronous: SQLiteSynchronous, schema: Option<&str>) -> Result<()> {
@ -67,7 +67,7 @@ pub trait PragmaExtension: ConnectionExtension {
     }
 
     fn pragma_get_synchronous(&self, schema: Option<&str>) -> Result<SQLiteSynchronous> {
-        Ok(self.pragma_get::<Integer, i32>("synchronous", schema)?.try_into()?)
+        self.pragma_get::<Integer, i32>("synchronous", schema)?.try_into()
     }
 }
 impl PragmaExtension for SqliteConnection {}
@ -25,9 +25,8 @@ impl DartEventCodeGen {
         for (index, render_ctx) in event_render_ctx.into_iter().enumerate() {
             let mut event_template = EventTemplate::new();
 
-            match event_template.render(render_ctx, index) {
-                Some(content) => render_result.push_str(content.as_ref()),
-                None => {}
+            if let Some(content) = event_template.render(render_ctx, index) {
+                render_result.push_str(content.as_ref())
             }
         }
 
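A `match` with one interesting arm and a `None => {}` fallback is single_match territory; `if let` says the same thing with less nesting. A short sketch using a hypothetical render step (the names below are made up for the example):

// Hypothetical renderer used only for this example.
fn render(index: usize) -> Option<String> {
    if index % 2 == 0 { Some(format!("item {}", index)) } else { None }
}

fn main() {
    let mut out = String::new();
    for index in 0..4 {
        // Before:
        // match render(index) {
        //     Some(content) => out.push_str(content.as_ref()),
        //     None => {}
        // }
        // After: a single if-let covers the only arm that does work.
        if let Some(content) = render(index) {
            out.push_str(content.as_ref());
        }
    }
    println!("{}", out);
}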
@ -89,7 +88,7 @@ pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
             ctxt.check().unwrap();
             attrs
                 .iter()
-                .filter(|attr| attr.attrs.event_attrs.ignore == false)
+                .filter(|attr| !attr.attrs.event_attrs.ignore)
                 .enumerate()
                 .map(|(_index, attr)| EventASTContext::from(&attr.attrs))
                 .collect::<Vec<_>>()
@ -103,30 +102,30 @@ pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
         .collect::<Vec<EventASTContext>>()
 }
 
-pub fn ast_to_event_render_ctx(ast: &Vec<EventASTContext>) -> Vec<EventRenderContext> {
+pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContext> {
     ast.iter()
         .map(|event_ast| {
-            let input_deserializer = match event_ast.event_input {
-                Some(ref event_input) => Some(event_input.get_ident().unwrap().to_string()),
-                None => None,
-            };
+            let input_deserializer = event_ast
+                .event_input
+                .as_ref()
+                .map(|event_input| event_input.get_ident().unwrap().to_string());
 
-            let output_deserializer = match event_ast.event_output {
-                Some(ref event_output) => Some(event_output.get_ident().unwrap().to_string()),
-                None => None,
-            };
+            let output_deserializer = event_ast
+                .event_output
+                .as_ref()
+                .map(|event_output| event_output.get_ident().unwrap().to_string());
             // eprintln!(
             //     "😁 {:?} / {:?}",
             //     event_ast.event_input, event_ast.event_output
             // );
 
-            return EventRenderContext {
+            EventRenderContext {
                 input_deserializer,
                 output_deserializer,
                 error_deserializer: event_ast.event_error.clone(),
                 event: event_ast.event.to_string(),
                 event_ty: event_ast.event_ty.to_string(),
-            };
+            }
         })
         .collect::<Vec<EventRenderContext>>()
 }
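The `match`es on `event_input` and `event_output` replaced above were hand-written versions of `Option::map`; going through `.as_ref().map(...)` keeps the field borrowed instead of moved. A reduced sketch with plain `String` fields (the real code maps syn identifiers):

struct EventAst {
    event_input: Option<String>,
    event_output: Option<String>,
}

fn deserializers(ast: &EventAst) -> (Option<String>, Option<String>) {
    // .as_ref() borrows the Option's contents so `ast` is not consumed,
    // and .map() replaces the explicit Some/None match.
    let input = ast.event_input.as_ref().map(|s| s.to_uppercase());
    let output = ast.event_output.as_ref().map(|s| s.to_uppercase());
    (input, output)
}

fn main() {
    let ast = EventAst {
        event_input: Some("doc_id".to_string()),
        event_output: None,
    };
    assert_eq!(deserializers(&ast), (Some("DOC_ID".to_string()), None));
}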
@ -5,7 +5,7 @@ pub struct EventTemplate {
     tera_context: Context,
 }
 
-pub const DART_IMPORTED: &'static str = r#"
+pub const DART_IMPORTED: &str = r#"
 /// Auto gen code from rust ast, do not edit
 part of 'dispatch.dart';
 "#;
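`&'static str` on a `const` (or `static`) is what the redundant_static_lifetimes lint points at: the `'static` lifetime is already implied there, so plain `&str` is enough. A trivial sketch:

// Before (lint fires): pub const DART_IMPORTED: &'static str = "...";
// After: the 'static lifetime is inferred for consts and statics.
pub const DART_IMPORTED: &str = "/// Auto gen code from rust ast, do not edit";

fn main() {
    println!("{}", DART_IMPORTED);
}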
@ -21,9 +21,9 @@ pub struct EventRenderContext {
 #[allow(dead_code)]
 impl EventTemplate {
     pub fn new() -> Self {
-        return EventTemplate {
+        EventTemplate {
             tera_context: Context::new(),
-        };
+        }
     }
 
     pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 mod dart_event;
 mod event_template;
 
@ -33,7 +33,7 @@ fn parse_files_protobuf(proto_crate_path: &str, proto_output_dir: &str) -> Vec<P
         .into_iter()
         .filter_entry(|e| !is_hidden(e))
         .filter_map(|e| e.ok())
-        .filter(|e| e.file_type().is_dir() == false)
+        .filter(|e| !e.file_type().is_dir())
         .map(|e| {
             let path = e.path().to_str().unwrap().to_string();
             let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
@ -64,7 +64,7 @@ fn parse_files_protobuf(proto_crate_path: &str, proto_output_dir: &str) -> Vec<P
 
         let s = struct_template.render().unwrap();
         proto_file_content.push_str(s.as_ref());
-        proto_file_content.push_str("\n");
+        proto_file_content.push('\n');
     });
 
     let enums = get_ast_enums(&ast);
@ -73,7 +73,7 @@ fn parse_files_protobuf(proto_crate_path: &str, proto_output_dir: &str) -> Vec<P
         enum_template.set_message_enum(&e);
         let s = enum_template.render().unwrap();
         proto_file_content.push_str(s.as_ref());
-        proto_file_content.push_str("\n");
+        proto_file_content.push('\n');
     });
 
     if !enums.is_empty() || !structs.is_empty() {
@ -95,7 +95,7 @@ pub fn parse_or_init_proto_file(path: &str) -> String {
     let mut proto_file_content = String::new();
     let imported_content = find_proto_file_import(path);
     proto_file_content.push_str(imported_content.as_ref());
-    proto_file_content.push_str("\n");
+    proto_file_content.push('\n');
     proto_file_content
 }
 
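`push_str("\n")` with a one-character literal is replaced by `push('\n')` throughout these code-generation files; the char overload avoids going through a string slice for a single character. A tiny sketch:

fn main() {
    let mut proto_file_content = String::new();
    proto_file_content.push_str("message Doc {}");
    // single_char_add_str: prefer push with a char literal.
    proto_file_content.push('\n');
    assert!(proto_file_content.ends_with('\n'));
}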
@ -105,8 +105,8 @@ pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
     // file.write_all(content.as_bytes()).unwrap();
     let ctxt = Ctxt::new();
     let mut proto_structs: Vec<Struct> = vec![];
-    ast.items.iter().for_each(|item| match item {
-        Item::Struct(item_struct) => {
+    ast.items.iter().for_each(|item| {
+        if let Item::Struct(item_struct) = item {
             let (_, fields) = struct_from_ast(&ctxt, &item_struct.fields);
 
             if fields
@ -121,7 +121,6 @@ pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
                 });
             }
         }
-        _ => {}
     });
     ctxt.check().unwrap();
     proto_structs
@ -133,20 +132,13 @@ pub fn get_ast_enums(ast: &syn::File) -> Vec<FlowyEnum> {
 
     ast.items.iter().for_each(|item| {
         // https://docs.rs/syn/1.0.54/syn/enum.Item.html
-        match item {
-            Item::Enum(item_enum) => {
-                let attrs = flowy_ast::enum_from_ast(
-                    &ctxt,
-                    &item_enum.ident,
-                    &item_enum.variants,
-                    &ast.attrs,
-                );
-                flowy_enums.push(FlowyEnum {
-                    name: item_enum.ident.to_string(),
-                    attrs,
-                });
-            }
-            _ => {}
+        if let Item::Enum(item_enum) = item {
+            let attrs =
+                flowy_ast::enum_from_ast(&ctxt, &item_enum.ident, &item_enum.variants, &ast.attrs);
+            flowy_enums.push(FlowyEnum {
+                name: item_enum.ident.to_string(),
+                attrs,
+            });
         }
     });
     ctxt.check().unwrap();
@ -182,18 +174,14 @@ fn find_proto_file_import(path: &str) -> String {
 
     content.lines().for_each(|line| {
         ////Result<Option<Match<'t>>>
-        if let Ok(some_line) = SYNTAX_REGEX.find(line) {
-            if let Some(m) = some_line {
-                result.push_str(m.as_str());
-                result.push_str("\n");
-            }
+        if let Ok(Some(m)) = SYNTAX_REGEX.find(line) {
+            result.push_str(m.as_str());
+            result.push('\n');
         }
 
-        if let Ok(some_line) = IMPORT_REGEX.find(line) {
-            if let Some(m) = some_line {
-                result.push_str(m.as_str());
-                result.push_str("\n");
-            }
+        if let Ok(Some(m)) = IMPORT_REGEX.find(line) {
+            result.push_str(m.as_str());
+            result.push('\n');
         }
     });
 
|
@ -27,7 +27,7 @@ impl ProtoGen {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write_proto_files(crate_infos: &Vec<CrateProtoInfo>) {
|
fn write_proto_files(crate_infos: &[CrateProtoInfo]) {
|
||||||
for crate_info in crate_infos {
|
for crate_info in crate_infos {
|
||||||
let dir = crate_info.inner.proto_file_output_dir();
|
let dir = crate_info.inner.proto_file_output_dir();
|
||||||
crate_info.files.iter().for_each(|info| {
|
crate_info.files.iter().for_each(|info| {
|
||||||
@ -41,7 +41,7 @@ fn write_proto_files(crate_infos: &Vec<CrateProtoInfo>) {
     }
 }
 
-fn write_rust_crate_mod_file(crate_infos: &Vec<CrateProtoInfo>) {
+fn write_rust_crate_mod_file(crate_infos: &[CrateProtoInfo]) {
     for crate_info in crate_infos {
         let mod_path = crate_info.inner.proto_model_mod_file();
         match OpenOptions::new()
@ -56,7 +56,7 @@ fn write_rust_crate_mod_file(crate_infos: &Vec<CrateProtoInfo>) {
                 mod_file_content.push_str("// Auto-generated, do not edit \n");
                 walk_dir(
                     crate_info.inner.proto_file_output_dir().as_ref(),
-                    |e| e.file_type().is_dir() == false,
+                    |e| !e.file_type().is_dir(),
                     |_, name| {
                         let c = format!("\nmod {}; \npub use {}::*; \n", &name, &name);
                         mod_file_content.push_str(c.as_ref());
@ -72,7 +72,7 @@ fn write_rust_crate_mod_file(crate_infos: &Vec<CrateProtoInfo>) {
 }
 
 fn write_flutter_protobuf_package_mod_file(
-    crate_infos: &Vec<CrateProtoInfo>,
+    crate_infos: &[CrateProtoInfo],
     package_info: &FlutterProtobufInfo,
 ) {
     let model_dir = package_info.model_dir();
@ -91,7 +91,7 @@ fn write_flutter_protobuf_package_mod_file(
 
             walk_dir(
                 crate_info.inner.proto_file_output_dir().as_ref(),
-                |e| e.file_type().is_dir() == false,
+                |e| !e.file_type().is_dir(),
                 |_, name| {
                     let c = format!("export './{}.pb.dart';\n", &name);
                     mod_file_content.push_str(c.as_ref());
@ -108,7 +108,7 @@ fn write_flutter_protobuf_package_mod_file(
     }
 }
 
-fn run_rust_protoc(crate_infos: &Vec<CrateProtoInfo>) {
+fn run_rust_protoc(crate_infos: &[CrateProtoInfo]) {
     for crate_info in crate_infos {
         let rust_out = crate_info.inner.proto_struct_output_dir();
         let proto_path = crate_info.inner.proto_file_output_dir();
@ -130,7 +130,7 @@ fn run_rust_protoc(crate_infos: &Vec<CrateProtoInfo>) {
     }
 }
 
-fn run_flutter_protoc(crate_infos: &Vec<CrateProtoInfo>, package_info: &FlutterProtobufInfo) {
+fn run_flutter_protoc(crate_infos: &[CrateProtoInfo], package_info: &FlutterProtobufInfo) {
     let model_dir = package_info.model_dir();
     if !Path::new(&model_dir).exists() {
         std::fs::create_dir_all(&model_dir).unwrap();
@ -158,11 +158,8 @@ fn run_flutter_protoc(crate_infos: &Vec<CrateProtoInfo>, package_info: &FlutterP
 }
 
 fn remove_everything_in_dir(dir: &str) {
-    if Path::new(dir).exists() {
-        if std::fs::remove_dir_all(dir).is_err()
-        {
-            panic!("Reset protobuf directory failed")
-        };
+    if Path::new(dir).exists() && std::fs::remove_dir_all(dir).is_err() {
+        panic!("Reset protobuf directory failed")
     }
     std::fs::create_dir_all(dir).unwrap();
 }
|
@ -102,7 +102,7 @@ pub fn parse_crate_info_from_path(root: &str) -> Vec<ProtobufCrate> {
|
|||||||
.filter_map(|e| e.ok())
|
.filter_map(|e| e.ok())
|
||||||
.filter(|e| is_crate_dir(e))
|
.filter(|e| is_crate_dir(e))
|
||||||
.flat_map(|e| parse_crate_config_from(&e))
|
.flat_map(|e| parse_crate_config_from(&e))
|
||||||
.map(|crate_config| ProtobufCrate::from_config(crate_config))
|
.map(ProtobufCrate::from_config)
|
||||||
.collect::<Vec<ProtobufCrate>>()
|
.collect::<Vec<ProtobufCrate>>()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -15,11 +15,11 @@ pub struct ProtobufDeriveMeta {
|
|||||||
impl ProtobufDeriveMeta {
|
impl ProtobufDeriveMeta {
|
||||||
pub fn new(structs: Vec<String>, enums: Vec<String>) -> Self {
|
pub fn new(structs: Vec<String>, enums: Vec<String>) -> Self {
|
||||||
let enums: Vec<_> = enums.into_iter().unique().collect();
|
let enums: Vec<_> = enums.into_iter().unique().collect();
|
||||||
return ProtobufDeriveMeta {
|
ProtobufDeriveMeta {
|
||||||
context: Context::new(),
|
context: Context::new(),
|
||||||
structs,
|
structs,
|
||||||
enums,
|
enums,
|
||||||
};
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn render(&mut self) -> Option<String> {
|
pub fn render(&mut self) -> Option<String> {
|
||||||
@ -37,7 +37,7 @@ impl ProtobufDeriveMeta {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write_derive_meta(crate_infos: &Vec<CrateProtoInfo>, derive_meta_dir: &str) {
|
pub fn write_derive_meta(crate_infos: &[CrateProtoInfo], derive_meta_dir: &str) {
|
||||||
let file_proto_infos = crate_infos
|
let file_proto_infos = crate_infos
|
||||||
.iter()
|
.iter()
|
||||||
.map(|ref crate_info| &crate_info.files)
|
.map(|ref crate_info| &crate_info.files)
|
||||||
@ -58,7 +58,7 @@ pub fn write_derive_meta(crate_infos: &Vec<CrateProtoInfo>, derive_meta_dir: &st
     let mut derive_template = ProtobufDeriveMeta::new(structs, enums);
     let new_content = derive_template.render().unwrap();
     let old_content = read_file(derive_meta_dir).unwrap();
-    if new_content.clone() == old_content {
+    if new_content == old_content {
         return;
     }
     // println!("{}", diff_lines(&old_content, &new_content));
@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 mod derive_meta;
 
 pub use derive_meta::*;
@ -10,10 +10,10 @@ pub struct EnumTemplate {
 #[allow(dead_code)]
 impl EnumTemplate {
     pub fn new() -> Self {
-        return EnumTemplate {
+        EnumTemplate {
             context: Context::new(),
             items: vec![],
-        };
+        }
     }
 
     pub fn set_message_enum(&mut self, flowy_enum: &FlowyEnum) {
@ -24,10 +24,10 @@ pub struct StructTemplate {
 #[allow(dead_code)]
 impl StructTemplate {
     pub fn new() -> Self {
-        return StructTemplate {
+        StructTemplate {
             context: Context::new(),
             fields: vec![],
-        };
+        }
     }
 
     pub fn set_message_struct_name(&mut self, name: &str) {
@ -46,8 +46,8 @@ impl StructTemplate {
             mapped_ty = RUST_TYPE_MAP[ty];
         }
 
-        match field.bracket_category {
-            Some(ref category) => match category {
+        if let Some(ref category) = field.bracket_category {
+            match category {
                 BracketCategory::Opt => match &field.bracket_inner_ty {
                     None => {}
                     Some(inner_ty) => match inner_ty.to_string().as_str() {
@ -93,8 +93,7 @@ impl StructTemplate {
                 BracketCategory::Other => self
                     .fields
                     .push(format!("{} {} = {};", mapped_ty, name, index)),
-            },
-            None => {}
+            }
         }
     }
 
@ -40,7 +40,7 @@ pub fn save_content_to_file_with_diff_prompt(content: &str, output_file: &str, _
         }
     };
     if new_content != old_content {
-        print_diff(old_content.clone(), new_content.clone());
+        print_diff(old_content, new_content.clone());
         write_to_file()
         // if force_write {
         // write_to_file()
@ -98,8 +98,7 @@ pub fn get_tera(directory: &str) -> Tera {
         .display()
         .to_string();
     let mut template_path = format!("{}/**/*.tera", root_absolute_path);
-    if cfg!(windows)
-    {
+    if cfg!(windows) {
         // remove "\\?\" prefix on windows
         template_path = format!("{}/**/*.tera", &root_absolute_path[4..]);
     }
@ -115,7 +114,7 @@ pub fn get_tera(directory: &str) -> Tera {
 
 pub fn is_crate_dir(e: &walkdir::DirEntry) -> bool {
     let cargo = e.path().file_stem().unwrap().to_str().unwrap().to_string();
-    cargo == "Cargo".to_string()
+    cargo == *"Cargo"
 }
 
 pub fn is_proto_file(e: &walkdir::DirEntry) -> bool {
@ -123,14 +122,14 @@ pub fn is_proto_file(e: &walkdir::DirEntry) -> bool {
         return false;
     }
     let ext = e.path().extension().unwrap().to_str().unwrap().to_string();
-    ext == "proto".to_string()
+    ext == *"proto"
 }
 
 pub fn is_hidden(entry: &walkdir::DirEntry) -> bool {
     entry
         .file_name()
         .to_str()
-        .map(|s| s.starts_with("."))
+        .map(|s| s.starts_with('.'))
         .unwrap_or(false)
 }
 
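Two related lints show up in these helpers: comparing a `String` against `"literal".to_string()` allocates just to compare (cmp_owned), and `starts_with(".")` passes a one-character string where a `char` pattern is cheaper (single_char_pattern). A combined sketch with simplified signatures:

fn is_cargo_stem(stem: &str) -> bool {
    let cargo = stem.to_string();
    // Compare against the dereferenced str literal instead of allocating a new String.
    cargo == *"Cargo"
}

fn is_hidden(file_name: &str) -> bool {
    // A char pattern avoids building a single-character &str.
    file_name.starts_with('.')
}

fn main() {
    assert!(is_cargo_stem("Cargo"));
    assert!(is_hidden(".git"));
}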
@ -1,7 +1,7 @@
-pub const HOST: &'static str = "localhost:8000";
-pub const HTTP_SCHEMA: &'static str = "http";
-pub const WS_SCHEMA: &'static str = "ws";
-pub const HEADER_TOKEN: &'static str = "token";
+pub const HOST: &str = "localhost:8000";
+pub const HTTP_SCHEMA: &str = "http";
+pub const WS_SCHEMA: &str = "ws";
+pub const HEADER_TOKEN: &str = "token";
 
 #[derive(Debug, Clone)]
 pub struct ServerConfig {
@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 mod request;
 
 pub use request::*;
@ -23,8 +23,8 @@ pub struct HttpRequestBuilder {
     middleware: Vec<Arc<dyn ResponseMiddleware + Send + Sync>>,
 }
 
-impl HttpRequestBuilder {
-    pub fn new() -> Self {
+impl std::default::Default for HttpRequestBuilder {
+    fn default() -> Self {
         Self {
             url: "".to_owned(),
             body: None,
@ -34,6 +34,10 @@ impl HttpRequestBuilder {
             middleware: Vec::new(),
         }
     }
+}
+
+impl HttpRequestBuilder {
+    pub fn new() -> Self { HttpRequestBuilder::default() }
 
     pub fn middleware<T>(mut self, middleware: Arc<T>) -> Self
     where
@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 mod response;
 
 #[cfg(feature = "http_server")]
@ -118,7 +118,7 @@ impl<'a> ASTField<'a> {
             Some(inner) => {
                 match inner.primitive_ty {
                     PrimitiveTy::Map(map_info) => {
-                        bracket_category = Some(BracketCategory::Map((map_info.key.clone(), map_info.value.clone())))
+                        bracket_category = Some(BracketCategory::Map((map_info.key.clone(), map_info.value)))
                     },
                     PrimitiveTy::Vec => {
                         bracket_category = Some(BracketCategory::Vec);
@ -170,7 +170,7 @@ impl<'a> ASTField<'a> {
     #[allow(dead_code)]
     pub fn name(&self) -> Option<syn::Ident> {
         if let syn::Member::Named(ident) = &self.member {
-            return Some(ident.clone());
+            Some(ident.clone())
         } else {
             None
         }
@ -205,7 +205,7 @@ pub fn enum_from_ast<'a>(
     cx: &Ctxt,
     ident: &syn::Ident,
     variants: &'a Punctuated<syn::Variant, Token![,]>,
-    enum_attrs: &Vec<syn::Attribute>,
+    enum_attrs: &[syn::Attribute],
 ) -> Vec<ASTEnumVariant<'a>> {
     variants
         .iter()
@ -1,5 +1,5 @@
+#![allow(clippy::all)]
 use crate::{symbol::*, Ctxt};
 
 use quote::ToTokens;
 use syn::{
     self,
@ -194,7 +194,7 @@ impl ASTAttrField {
         }
 
         ASTAttrField {
-            name: ident.to_string().clone(),
+            name: ident,
             pb_index: pb_index.get(),
             pb_one_of: pb_one_of.get(),
             skip_serializing: skip_serializing.get(),
@ -205,12 +205,7 @@ impl ASTAttrField {
     }
 
     #[allow(dead_code)]
-    pub fn pb_index(&self) -> Option<String> {
-        match self.pb_index {
-            Some(ref lit) => Some(lit.base10_digits().to_string()),
-            None => None,
-        }
-    }
+    pub fn pb_index(&self) -> Option<String> { self.pb_index.as_ref().map(|lit| lit.base10_digits().to_string()) }
 
     pub fn is_one_of(&self) -> bool { self.pb_one_of }
 
@ -249,21 +244,18 @@ pub struct ASTEnumAttrVariant {
 }
 
 impl ASTEnumAttrVariant {
-    pub fn from_ast(ctxt: &Ctxt, ident: &syn::Ident, variant: &syn::Variant, enum_attrs: &Vec<syn::Attribute>) -> Self {
+    pub fn from_ast(ctxt: &Ctxt, ident: &syn::Ident, variant: &syn::Variant, enum_attrs: &[syn::Attribute]) -> Self {
         let enum_item_name = variant.ident.to_string();
        let enum_name = ident.to_string();
         let mut value = String::new();
         if variant.discriminant.is_some() {
-            match variant.discriminant.as_ref().unwrap().1 {
-                syn::Expr::Lit(ref expr_list) => {
-                    let lit_int = if let syn::Lit::Int(ref int_value) = expr_list.lit {
-                        int_value
-                    } else {
-                        unimplemented!()
-                    };
-                    value = lit_int.base10_digits().to_string();
-                },
-                _ => {},
+            if let syn::Expr::Lit(ref expr_list) = variant.discriminant.as_ref().unwrap().1 {
+                let lit_int = if let syn::Lit::Int(ref int_value) = expr_list.lit {
+                    int_value
+                } else {
+                    unimplemented!()
+                };
+                value = lit_int.base10_digits().to_string();
             }
         }
         let event_attrs = get_event_attrs_from(ctxt, &variant.attrs, enum_attrs);
@ -282,11 +274,7 @@ impl ASTEnumAttrVariant {
     pub fn event_error(&self) -> String { self.event_attrs.error_ty.as_ref().unwrap().clone() }
 }
 
-fn get_event_attrs_from(
-    ctxt: &Ctxt,
-    variant_attrs: &Vec<syn::Attribute>,
-    enum_attrs: &Vec<syn::Attribute>,
-) -> EventAttrs {
+fn get_event_attrs_from(ctxt: &Ctxt, variant_attrs: &[syn::Attribute], enum_attrs: &[syn::Attribute]) -> EventAttrs {
     let mut event_attrs = EventAttrs {
         input: None,
         output: None,
@ -296,7 +284,7 @@ fn get_event_attrs_from(
 
     enum_attrs
         .iter()
-        .filter(|attr| attr.path.segments.iter().find(|s| s.ident == EVENT_ERR).is_some())
+        .filter(|attr| attr.path.segments.iter().any(|s| s.ident == EVENT_ERR))
         .for_each(|attr| {
             if let Ok(NameValue(named_value)) = attr.parse_meta() {
                 if let syn::Lit::Str(s) = named_value.lit {
@ -357,13 +345,12 @@ fn get_event_attrs_from(
     }
 
     // eprintln!("😁{:#?}", event_attrs);
 
     event_attrs
 }
 
 pub fn get_meta_items(cx: &Ctxt, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
     if attr.path != PB_ATTRS && attr.path != EVENT {
-        return Ok(Vec::new());
+        return Ok(vec![]);
     }
 
     // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
@ -1,6 +1,5 @@
 use quote::ToTokens;
 use std::{cell::RefCell, fmt::Display, thread};
-use syn;
 
 #[derive(Default)]
 pub struct Ctxt {
@ -17,7 +17,7 @@ impl EventASTContext {
         }
 
         let event = format_ident!("{}", &command_name);
-        let splits = command_name.split("_").collect::<Vec<&str>>();
+        let splits = command_name.split('_').collect::<Vec<&str>>();
 
         let event_ty = format_ident!("{}", variant.enum_name);
         let event_request_struct = format_ident!("{}Event", &splits.join(""));
@ -72,7 +72,7 @@ pub fn parse_ty<'a>(ctxt: &Ctxt, ty: &'a syn::Type) -> Option<TyInfo<'a>> {
             });
         };
     }
-    ctxt.error_spanned_by(ty, format!("Unsupported inner type, get inner type fail"));
+    ctxt.error_spanned_by(ty, "Unsupported inner type, get inner type fail".to_string());
     None
 }
 
@ -104,12 +104,12 @@ pub fn generate_hashmap_ty_info<'a>(
     let key = parse_ty(ctxt, types[0]).unwrap().ident.to_string();
     let value = parse_ty(ctxt, types[1]).unwrap().ident.to_string();
     let bracket_ty_info = Box::new(parse_ty(ctxt, &types[1]));
-    return Some(TyInfo {
+    Some(TyInfo {
         ident: &path_segment.ident,
         ty,
         primitive_ty: PrimitiveTy::Map(MapInfo::new(key, value)),
         bracket_ty_info,
-    });
+    })
 }
 
 fn generate_option_ty_info<'a>(
@ -121,12 +121,12 @@ fn generate_option_ty_info<'a>(
     assert_eq!(path_segment.ident.to_string(), "Option".to_string());
     let types = parse_bracketed(bracketed);
     let bracket_ty_info = Box::new(parse_ty(ctxt, &types[0]));
-    return Some(TyInfo {
+    Some(TyInfo {
         ident: &path_segment.ident,
         ty,
         primitive_ty: PrimitiveTy::Opt,
         bracket_ty_info,
-    });
+    })
 }
 
 fn generate_vec_ty_info<'a>(
@ -146,5 +146,5 @@ fn generate_vec_ty_info<'a>(
             bracket_ty_info: bracketed_ty_info,
         });
     }
-    return None;
+    None
 }
@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 mod derive_cache;
 
 pub use derive_cache::*;
@ -38,8 +38,7 @@ pub fn make_de_token_steam(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStrea
                 Ok(o)
             }
         }
-    }
-    .into();
+    };
 
     Some(de_token_stream)
     // None
@ -20,14 +20,12 @@ pub fn expand_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::E
 
     let mut token_stream: TokenStream = TokenStream::default();
 
-    let de_token_stream = make_de_token_steam(&ctxt, &cont);
-    if de_token_stream.is_some() {
-        token_stream.extend(de_token_stream.unwrap());
+    if let Some(de_token_stream) = make_de_token_steam(&ctxt, &cont) {
+        token_stream.extend(de_token_stream);
     }
 
-    let se_token_stream = make_se_token_stream(&ctxt, &cont);
-    if se_token_stream.is_some() {
-        token_stream.extend(se_token_stream.unwrap());
+    if let Some(se_token_stream) = make_se_token_stream(&ctxt, &cont) {
+        token_stream.extend(se_token_stream);
     }
 
     ctxt.check()?;
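The `is_some()` check followed by `unwrap()` removed above is the classic shape behind clippy's unnecessary_unwrap warning; binding with `if let Some(..)` removes both the double lookup and the panic path. A sketch with a hypothetical token-stream stand-in:

// Hypothetical generator used only for this example.
fn make_token_stream(enabled: bool) -> Option<String> {
    enabled.then(|| "generated tokens".to_string())
}

fn main() {
    let mut token_stream = String::new();

    // Before:
    // let de = make_token_stream(true);
    // if de.is_some() { token_stream.push_str(&de.unwrap()); }

    // After: one pattern match, no unwrap.
    if let Some(de) = make_token_stream(true) {
        token_stream.push_str(&de);
    }
    assert!(!token_stream.is_empty());
}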
@ -43,9 +41,8 @@ pub fn expand_enum_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<s
 
     let mut token_stream: TokenStream = TokenStream::default();
 
-    let enum_token_stream = make_enum_token_stream(&ctxt, &cont);
-    if enum_token_stream.is_some() {
-        token_stream.extend(enum_token_stream.unwrap());
+    if let Some(enum_token_stream) = make_enum_token_stream(&ctxt, &cont) {
+        token_stream.extend(enum_token_stream);
     }
 
     ctxt.check()?;
@ -35,8 +35,7 @@ pub fn make_se_token_stream(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStre
                 Ok(pb)
             }
         }
-    }
-    .into();
+    };
 
     Some(se_token_stream)
 }
@ -10,13 +10,13 @@ pub(crate) fn get_member_ident<'a>(ctxt: &Ctxt, member: &'a syn::Member) -> Opti
     if let syn::Member::Named(ref ident) = member {
         Some(ident)
     } else {
-        ctxt.error_spanned_by(member, format!("Unsupported member, shouldn't be self.0"));
+        ctxt.error_spanned_by(member, "Unsupported member, shouldn't be self.0".to_string());
         None
     }
 }
 
 pub fn assert_bracket_ty_is_some(ctxt: &Ctxt, ty_info: &TyInfo) {
     if ty_info.bracket_ty_info.is_none() {
-        ctxt.error_spanned_by(ty_info.ty, format!("Invalid bracketed type when gen de token steam"));
+        ctxt.error_spanned_by(ty_info.ty, "Invalid bracketed type when gen de token steam".to_string());
     }
 }
@ -125,7 +125,7 @@ impl Document {
     pub fn format(&mut self, interval: Interval, attribute: Attribute) -> Result<Delta, DocumentError> {
         let _ = validate_interval(&self.delta, &interval)?;
         tracing::trace!("format with {} at {}", attribute, interval);
-        let format_delta = self.view.format(&self.delta, attribute.clone(), interval).unwrap();
+        let format_delta = self.view.format(&self.delta, attribute, interval).unwrap();
 
         tracing::trace!("👉 receive change: {}", format_delta);
         self.compose_delta(format_delta.clone())?;
@ -54,7 +54,7 @@ impl InsertExt for PreserveBlockFormatOnInsert {
                 new_delta.retain(offset, plain_attributes());
                 let len = newline_op.get_data().find(NEW_LINE).unwrap();
                 new_delta.retain(len, plain_attributes());
-                new_delta.retain(1, reset_attribute.clone());
+                new_delta.retain(1, reset_attribute);
             }
 
             return Some(new_delta);
@ -44,7 +44,7 @@ impl InsertExt for PreserveInlineFormat {
             .insert_with_attributes(text, attributes)
             .build();
 
-        return Some(new_delta);
+        Some(new_delta)
     }
 }
 
@ -73,7 +73,7 @@ impl InsertExt for PreserveLineFormatOnSplit {
         new_delta.retain(index + replace_len, plain_attributes());
 
         if newline_status.is_contain() {
-            debug_assert!(next.has_attribute() == false);
+            debug_assert!(!next.has_attribute());
             new_delta.insert(NEW_LINE, plain_attributes());
             return Some(new_delta);
         }
@ -26,8 +26,8 @@ pub struct History {
     capacity: usize,
 }
 
-impl History {
-    pub fn new() -> Self {
+impl std::default::Default for History {
+    fn default() -> Self {
         History {
             cur_undo: 1,
             undos: Vec::new(),
@ -35,6 +35,10 @@ impl History {
             capacity: MAX_UNDOS,
         }
     }
+}
+
+impl History {
+    pub fn new() -> Self { History::default() }
 
     pub fn can_undo(&self) -> bool { !self.undos.is_empty() }
 
@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 mod doc;
 pub mod parser;
 mod revision;
@ -4,7 +4,7 @@ pub struct DocId(pub String);
 impl DocId {
     pub fn parse(s: String) -> Result<DocId, String> {
         if s.trim().is_empty() {
-            return Err(format!("Doc id can not be empty or whitespace"));
+            return Err("Doc id can not be empty or whitespace".to_string());
        }
 
         Ok(Self(s))
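`format!` with no interpolation allocates a `String` the long way round, which is what the useless_format lint complains about; `.to_string()` on the literal is the direct form used in these parsers. Sketch:

fn parse_doc_id(s: &str) -> Result<String, String> {
    if s.trim().is_empty() {
        // Before: return Err(format!("Doc id can not be empty or whitespace"));
        return Err("Doc id can not be empty or whitespace".to_string());
    }
    Ok(s.to_string())
}

fn main() {
    assert!(parse_doc_id("  ").is_err());
    assert!(parse_doc_id("doc-1").is_ok());
}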
@ -45,8 +45,8 @@ impl AsRef<i64> for RevId {
     fn as_ref(&self) -> &i64 { &self.value }
 }
 
-impl std::convert::Into<i64> for RevId {
-    fn into(self) -> i64 { self.value }
+impl std::convert::From<RevId> for i64 {
+    fn from(rev_id: RevId) -> Self { rev_id.value }
 }
 
 impl std::convert::From<i64> for RevId {
@ -129,14 +129,7 @@ impl Revision {
 
     pub fn revision_from_doc(doc: Doc, ty: RevType) -> Revision {
         let delta_data = doc.data.as_bytes();
-        let revision = Revision::new(
-            doc.base_rev_id.clone(),
-            doc.rev_id.clone(),
-            delta_data.to_owned(),
-            &doc.id,
-            ty,
-        );
-        revision
+        Revision::new(doc.base_rev_id, doc.rev_id, delta_data.to_owned(), &doc.id, ty)
     }
 
 #[derive(Debug, Clone, Default, ProtoBuf)]
||||||
@ -161,6 +154,8 @@ impl RevisionRange {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn is_empty(&self) -> bool { self.end == self.start }
|
||||||
|
|
||||||
pub fn iter(&self) -> RangeInclusive<i64> {
|
pub fn iter(&self) -> RangeInclusive<i64> {
|
||||||
debug_assert!(self.start != self.end);
|
debug_assert!(self.start != self.end);
|
||||||
RangeInclusive::new(self.start, self.end)
|
RangeInclusive::new(self.start, self.end)
|
||||||
|
@ -1,3 +1,4 @@
+#![allow(clippy::module_inception)]
 mod ws;
 
 pub use ws::*;
@ -9,8 +9,7 @@ pub fn doc_initial_string() -> String { doc_initial_delta().to_json() }
 #[inline]
 pub fn initial_read_me() -> Delta {
     let json = include_str!("READ_ME.json");
-    let delta = Delta::from_json(json).unwrap();
-    delta
+    Delta::from_json(json).unwrap()
 }
 
 #[cfg(test)]
@ -2,12 +2,7 @@ use lib_ot::core::{NEW_LINE, WHITESPACE};
 use std::sync::atomic::{AtomicI64, Ordering::SeqCst};
 
 #[inline]
-pub fn find_newline(s: &str) -> Option<usize> {
-    match s.find(NEW_LINE) {
-        None => None,
-        Some(line_break) => Some(line_break),
-    }
-}
+pub fn find_newline(s: &str) -> Option<usize> { s.find(NEW_LINE) }
 
 #[inline]
 pub fn is_newline(s: &str) -> bool { s == NEW_LINE }
@ -68,7 +68,7 @@ mod tests {
 
     #[test]
     fn names_containing_an_invalid_character_are_rejected() {
-        for name in vec!['/', '(', ')', '"', '<', '>', '\\', '{', '}'] {
+        for name in &['/', '(', ')', '"', '<', '>', '\\', '{', '}'] {
            let name = name.to_string();
             assert_err!(UserName::parse(name));
         }
@ -5,7 +5,7 @@ impl UserWorkspace {
     pub fn parse(s: String) -> Result<UserWorkspace, String> {
         let is_empty_or_whitespace = s.trim().is_empty();
         if is_empty_or_whitespace {
-            return Err(format!("workspace id is empty or whitespace"));
+            return Err("workspace id is empty or whitespace".to_string());
         }
         Ok(Self(s))
     }
@ -99,9 +99,7 @@ pub struct App {
 }
 
 impl App {
-    pub fn take_belongings(&mut self) -> RepeatedView {
-        ::std::mem::replace(&mut self.belongings, RepeatedView::default())
-    }
+    pub fn take_belongings(&mut self) -> RepeatedView { std::mem::take(&mut self.belongings) }
 }
 
 #[derive(PartialEq, Debug, Default, ProtoBuf, Clone)]
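`std::mem::replace(&mut field, Default::default())` is exactly what `std::mem::take` does, which is the mem_replace_with_default suggestion applied above. A sketch with a plain `Vec` standing in for the crate's `RepeatedView`:

#[derive(Default)]
struct App {
    belongings: Vec<String>,
}

impl App {
    // Moves the current value out and leaves Default::default() behind.
    fn take_belongings(&mut self) -> Vec<String> { std::mem::take(&mut self.belongings) }
}

fn main() {
    let mut app = App { belongings: vec!["view".to_string()] };
    let views = app.take_belongings();
    assert_eq!(views.len(), 1);
    assert!(app.belongings.is_empty());
}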
@ -79,7 +79,7 @@ impl TryInto<UpdateAppParams> for UpdateAppRequest {
 
         let color_style = match self.color_style {
             None => None,
-            Some(color_style) => Some(AppColorStyle::parse(color_style.theme_color.clone())?.into()),
+            Some(color_style) => Some(AppColorStyle::parse(color_style.theme_color)?.into()),
         };
 
         Ok(UpdateAppParams {
Some files were not shown because too many files have changed in this diff.