save user info to pg && response to client with protobuf data

appflowy 2021-08-22 15:32:48 +08:00
parent 3f65d3eb48
commit 66c4daab7a
33 changed files with 328 additions and 279 deletions

.run/ProtoBuf_Gen.run.xml (new file, +22 lines)

@ -0,0 +1,22 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="ProtoBuf_Gen" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="command" value="run --manifest-path $PROJECT_DIR$/scripts/flowy-tool/Cargo.toml -- pb-gen --rust_source=$PROJECT_DIR$/rust-lib/ --derive_meta=$PROJECT_DIR$/rust-lib/flowy-derive/src/derive_cache/derive_cache.rs --flutter_package_lib=$PROJECT_DIR$/app_flowy/packages/flowy_sdk/lib" />
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
<option name="channel" value="DEFAULT" />
<option name="allFeatures" value="false" />
<option name="emulateTerminal" value="false" />
<option name="backtrace" value="SHORT" />
<envs>
<env name="rust_source" value="${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/" />
<env name="build_cache" value="${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/flowy-derive/src/auto_gen_file/category_from_str.rs" />
<env name="proto_file_output" value="${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/flowy-protobuf/define" />
<env name="rust_mod_dir" value="${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/flowy-protobuf/src/" />
<env name="flutter_mod_dir" value="${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/flutter-lib/packages/flowy_protobuf/lib/" />
</envs>
<option name="isRedirectInput" value="false" />
<option name="redirectInputPath" value="" />
<method v="2">
<option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
</method>
</configuration>
</component>

.run/Run backend.run.xml (new file, +18 lines)

@ -0,0 +1,18 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Run backend" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="command" value="run --package backend --bin backend" />
<option name="workingDirectory" value="file://$PROJECT_DIR$/backend" />
<option name="channel" value="DEFAULT" />
<option name="allFeatures" value="false" />
<option name="emulateTerminal" value="false" />
<option name="backtrace" value="SHORT" />
<envs>
<env name="APP_ENVIRONMENT" value="production" />
</envs>
<option name="isRedirectInput" value="false" />
<option name="redirectInputPath" value="" />
<method v="2">
<option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
</method>
</configuration>
</component>

.run/dart-event.run.xml (new file, +16 lines)

@ -0,0 +1,16 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="dart-event" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="command" value="run --manifest-path $PROJECT_DIR$/scripts/flowy-tool/Cargo.toml -- dart-event --rust_source=$PROJECT_DIR$/rust-lib/ --output=$PROJECT_DIR$/app_flowy/packages/flowy_sdk/lib/dispatch/code_gen.dart" />
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
<option name="channel" value="DEFAULT" />
<option name="allFeatures" value="false" />
<option name="emulateTerminal" value="false" />
<option name="backtrace" value="SHORT" />
<envs />
<option name="isRedirectInput" value="false" />
<option name="redirectInputPath" value="" />
<method v="2">
<option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
</method>
</configuration>
</component>

View File

@ -8,3 +8,4 @@ Dockerfile
scripts/
migrations/
app_flowy/
+rust-lib/target/

View File

@ -1,15 +1,58 @@
# We use the latest Rust stable release as base image
FROM rust:1.53.0
# Let's switch our working directory to `app` (equivalent to `cd app`)
# The `app` folder will be created for us by Docker in case it does not
# exist already.
WORKDIR /app
# Copy all files from our working environment to our Docker image
COPY . .
# Let's build our binary!
# We'll use the release profile to make it fast
WORKDIR /app/backend
ENV SQLX_OFFLINE true
ENV APP_ENVIRONMENT production
RUN cargo build --release
# When `docker run` is executed, launch the binary!
ENTRYPOINT ["./target/release/backend"]
#
## We use the latest Rust stable release as base image
#FROM lukemathwalker/cargo-chef:latest-rust-1.53.0 as planner
#WORKDIR /app
#COPY . .
#
#WORKDIR /app/backend
#RUN cargo chef prepare --recipe-path recipe.json
#
#FROM lukemathwalker/cargo-chef:latest-rust-1.53.0 as cacher
#WORKDIR /app/backend
#COPY --from=planner /app/backend/recipe.json recipe.json
## Build our project dependencies, not our application!
#RUN cargo chef cook --release --recipe-path recipe.json
#
#FROM rust:1.53.0 AS builder
#WORKDIR /app/backend
## Copy over the cached dependencies
#COPY --from=cacher /app/backend/target target
#COPY --from=cacher /usr/local/cargo /usr/local/cargo
#COPY . .
#
#ENV SQLX_OFFLINE true
#RUN cargo build --release --bin backend
#
#
#FROM debian:buster-slim AS runtime
#WORKDIR /app/backend
#RUN apt-get update -y \
# && apt-get install -y --no-install-recommends openssl \
# # Clean up
# && apt-get autoremove -y \
# && apt-get clean -y \
# && rm -rf /var/lib/apt/lists/*
#COPY --from=builder /app/backend/target/release/backend backend
##COPY configuration configuration
#ENV APP_ENVIRONMENT production
#ENTRYPOINT ["./backend"]

View File

@ -1,4 +1,4 @@
application:
  host: 0.0.0.0
database:
-  require_ssl: true
+  require_ssl: false

View File

@ -28,3 +28,7 @@ export DB_PORT=5433
![img_1.png](img_1.png)
[Docker command](https://docs.docker.com/engine/reference/commandline/builder_prune/)
+### Run
+By default, Docker images do not expose their ports to the underlying host machine. We need to do it explicitly using the -p flag.
+`docker run -p 8000:8000 backend`

View File

@ -63,7 +63,10 @@ async fn init_app_context(configuration: &Settings) -> Arc<AppContext> {
    let pg_pool = Arc::new(
        get_connection_pool(&configuration.database)
            .await
-           .expect("Failed to connect to Postgres."),
+           .expect(&format!(
+               "Failed to connect to Postgres {:?}.",
+               configuration.database
+           )),
    );

    let ws_server = WSServer::new().start();
@ -77,7 +80,7 @@ async fn init_app_context(configuration: &Settings) -> Arc<AppContext> {
pub async fn get_connection_pool(configuration: &DatabaseSettings) -> Result<PgPool, sqlx::Error> {
    PgPoolOptions::new()
-       .connect_timeout(std::time::Duration::from_secs(2))
+       .connect_timeout(std::time::Duration::from_secs(5))
        .connect_with(configuration.with_db())
        .await
}

View File

@ -8,15 +8,22 @@ pub struct Settings {
    pub application: ApplicationSettings,
}

+// Using 127.0.0.1 as the host instructs our application to only accept
+// connections coming from the same machine. However, requests fired from the
+// host machine are not seen as local by our Docker image.
+//
+// Using 0.0.0.0 as the host instructs our application to accept connections
+// from any network interface. So we use 127.0.0.1 for local development and
+// set it to 0.0.0.0 in our Docker images.
+//
#[derive(serde::Deserialize, Clone)]
pub struct ApplicationSettings {
    #[serde(deserialize_with = "deserialize_number_from_string")]
    pub port: u16,
    pub host: String,
+   pub base_url: String,
}

-#[derive(serde::Deserialize, Clone)]
+#[derive(serde::Deserialize, Clone, Debug)]
pub struct DatabaseSettings {
    pub username: String,
    pub password: String,
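As a side note, here is a minimal sketch of how settings like these typically feed the `connect_with(configuration.with_db())` call shown earlier, assuming a zero2prod-style `with_db` helper and the `require_ssl` flag toggled in base.yaml above; field names such as `database_name` are illustrative, not taken from this commit:

```rust
use sqlx::postgres::{PgConnectOptions, PgSslMode};

#[derive(serde::Deserialize, Clone, Debug)]
pub struct DatabaseSettings {
    pub username: String,
    pub password: String,
    pub port: u16,
    pub host: String,
    pub database_name: String, // illustrative field name
    pub require_ssl: bool,
}

impl DatabaseSettings {
    // Connection options for the configured database. `require_ssl: false`
    // (as set in base.yaml above) maps to PgSslMode::Prefer: try TLS, but
    // fall back to plaintext if the server does not support it.
    pub fn with_db(&self) -> PgConnectOptions {
        let ssl_mode = if self.require_ssl {
            PgSslMode::Require
        } else {
            PgSslMode::Prefer
        };
        PgConnectOptions::new()
            .host(&self.host)
            .username(&self.username)
            .password(&self.password)
            .port(self.port)
            .ssl_mode(ssl_mode)
            .database(&self.database_name)
    }
}
```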

View File

@ -14,11 +14,9 @@ pub async fn register(
    _request: HttpRequest,
    payload: Payload,
    auth: Data<Arc<Auth>>,
-) -> Result<HttpResponse, Error> {
+) -> Result<HttpResponse, ServerError> {
    let params: SignUpParams = parse_from_payload(payload).await?;
-   let _ = auth.sign_up(params).await?;
-   let resp = FlowyResponse::success();
+   let resp = auth.sign_up(params).await?;
    Ok(resp.into())
}

View File

@ -1,5 +1,5 @@
use chrono::Utc;
-use flowy_net::response::{ServerCode, ServerError};
+use flowy_net::response::{FlowyResponse, ServerCode, ServerError};
use flowy_user::{entities::SignUpResponse, protobuf::SignUpParams};
use sqlx::PgPool;
use std::sync::Arc;
@ -11,15 +11,16 @@ pub struct Auth {
impl Auth {
    pub fn new(db_pool: Arc<PgPool>) -> Self { Self { db_pool } }

-   pub async fn sign_up(&self, params: SignUpParams) -> Result<SignUpResponse, ServerError> {
+   pub async fn sign_up(&self, params: SignUpParams) -> Result<FlowyResponse, ServerError> {
        // email exist?
        // generate user id
+       let uuid = uuid::Uuid::new_v4();
        let result = sqlx::query!(
            r#"
                INSERT INTO user_table (id, email, name, create_time, password)
                VALUES ($1, $2, $3, $4, $5)
            "#,
-           uuid::Uuid::new_v4(),
+           uuid,
            params.email,
            params.name,
            Utc::now(),
@ -28,11 +29,14 @@ impl Auth {
        .execute(self.db_pool.as_ref())
        .await;

-       let response = SignUpResponse {
-           uid: "".to_string(),
-           name: "".to_string(),
-           email: "".to_string(),
+       let data = SignUpResponse {
+           uid: uuid.to_string(),
+           name: params.name,
+           email: params.email,
        };
+       let response = FlowyResponse::from(data, "", ServerCode::Success)?;
        Ok(response)
    }

View File

@ -19,5 +19,7 @@ members = [
    "flowy-net",
]
+exclude = ["../backend"]

[profile.dev]
split-debuginfo = "unpacked"

View File

@ -1,6 +1,6 @@
use crate::errors::{DispatchError, InternalError};
use bytes::Bytes;
-use protobuf::ProtobufError;
use std::convert::TryFrom;

// To bytes

View File

@ -5,7 +5,7 @@ use crate::{
};
use bytes::Bytes;
use dyn_clone::DynClone;
-use protobuf::ProtobufError;
use serde::{Serialize, Serializer};
use std::fmt;
use tokio::{sync::mpsc::error::SendError, task::JoinError};

View File

@ -1,7 +1,7 @@
use crate::{
    byte_trait::FromBytes,
    data::Data,
-   errors::{DispatchError, InternalError},
+   errors::DispatchError,
    request::{EventRequest, Payload},
    response::Responder,
};

View File

@ -1,6 +1,6 @@
use lazy_static::lazy_static;

-pub const HOST: &'static str = "http://0.0.0.0:3030";
+pub const HOST: &'static str = "http://localhost:8000";

lazy_static! {
    pub static ref SIGN_UP_URL: String = format!("{}/user/register", HOST);

View File

@ -1,45 +0,0 @@
use crate::response::FlowyResponse;
use protobuf::ProtobufError;
use std::fmt::{Formatter, Write};
// #[derive(Debug)]
// pub struct ServerError {
// code: ErrorCode
// }
//
// pub enum ErrorCode {
// InternalError(String),
// ProtobufError(ProtobufError),
// BadRequest(FlowyResponse<String>),
// Unauthorized,
// }
//
//
// impl std::fmt::Display for ErrorCode {
// fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
// match self {
// ErrorCode::InternalError(_) => f.write_str("Internal Server
// Error"), ErrorCode::ProtobufError(err) =>
// f.write_str(&format!("protobuf error: {}", err)),
// ErrorCode::BadRequest(request) => { let msg = format!("Bad
// Request: {:?}", request); f.write_str(&msg)
// },
// ErrorCode::Unauthorized => f.write_str("Unauthorized"),
// }
// }
// }
// impl std::convert::From<ProtobufError> for ServerCode {
// fn from(err: ProtobufError) -> Self { ServerCode::ProtobufError(err) }
// }
//
// impl std::convert::From<reqwest::Error> for ServerError {
// fn from(error: reqwest::Error) -> Self {
// let msg = format!("{:?}", error);
// ServerError::InternalError(msg)
// }
// }
//
// impl std::convert::From<String> for ServerError {
// fn from(error: String) -> Self { ServerError::InternalError(error) }
// }

View File

@ -1,3 +0,0 @@
mod errors;
pub use errors::*;

View File

@ -1,6 +1,4 @@
-pub mod errors;
-pub mod future;
pub mod config;
+pub mod future;
pub mod request;
pub mod response;

View File

@ -1,23 +1,13 @@
-use crate::{future::ResultFuture, response::ServerError};
+use crate::response::{FlowyResponse, ServerCode, ServerError};
use bytes::Bytes;
+use hyper::http;
use protobuf::{Message, ProtobufError};
-use reqwest::{Client, Error, Response};
+use reqwest::{Client, Response};
use std::{
    convert::{TryFrom, TryInto},
    time::Duration,
};
-use hyper::{StatusCode, http};
-use tokio::sync::{oneshot, oneshot::error::RecvError};
-use crate::response::ServerCode;
+use tokio::sync::oneshot;

-// pub async fn http_post<T1, T2>(url: &str, data: T1) -> ResultFuture<T2,
-// NetworkError> where
-//     T1: TryInto<Bytes, Error = ProtobufError> + Send + Sync + 'static,
-//     T2: TryFrom<Bytes, Error = ProtobufError> + Send + Sync + 'static,
-// {
-//     let url = url.to_owned();
-//     ResultFuture::new(async move { post(url, data).await })
-// }

pub async fn http_post<T1, T2>(url: &str, data: T1) -> Result<T2, ServerError>
where
@ -37,9 +27,9 @@ where
    let response = rx.await??;
    if response.status() == http::StatusCode::OK {
        let response_bytes = response.bytes().await?;
-       let data = T2::try_from(response_bytes)?;
+       let flowy_resp: FlowyResponse = serde_json::from_slice(&response_bytes).unwrap();
+       let data = T2::try_from(flowy_resp.data)?;
        Ok(data)
    } else {
        Err(ServerError {
            code: ServerCode::InternalError,
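As a standalone sketch of the decode step above: the server now replies with a JSON FlowyResponse envelope whose `data` field carries the protobuf-encoded payload, so the client parses the envelope first and only afterwards converts `data` into the expected type with `TryFrom`. The `Envelope` struct and the byte values below are simplified stand-ins, not the real flowy-net types:

```rust
use serde::Deserialize;

// Simplified stand-in for flowy-net's FlowyResponse: the real struct stores
// `bytes::Bytes` and a ServerCode enum rather than Vec<u8> and u8.
#[derive(Deserialize)]
struct Envelope {
    msg: String,
    code: u8,
    data: Vec<u8>, // protobuf-encoded payload, serialized as a JSON byte array
}

fn main() {
    // Made-up server reply: empty msg, code 0, a two-byte payload.
    let body = br#"{"msg":"","code":0,"data":[8,1]}"#;

    // Step 1: decode the JSON envelope.
    let envelope: Envelope = serde_json::from_slice(body).unwrap();
    assert_eq!(envelope.msg, "");

    // Step 2: only now hand `data` to the protobuf layer; in `http_post`
    // above this is the `T2::try_from(flowy_resp.data)?` call.
    println!("payload: {} bytes, code {}", envelope.data.len(), envelope.code);
}
```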

View File

@ -1,6 +1,7 @@
-use serde::{Serialize, __private::Formatter};
+use bytes::Bytes;
+use serde::{Deserialize, Serialize, __private::Formatter};
use serde_repr::*;
-use std::{error::Error, fmt};
+use std::{convert::TryInto, error::Error, fmt};
use tokio::sync::oneshot::error::RecvError;

#[derive(Debug)]
@ -16,11 +17,11 @@ impl std::fmt::Display for ServerError {
    }
}

-impl std::convert::From<&ServerError> for FlowyResponse<String> {
+impl std::convert::From<&ServerError> for FlowyResponse {
    fn from(error: &ServerError) -> Self {
        FlowyResponse {
            msg: error.msg.clone(),
-           data: None,
+           data: Bytes::from(vec![]),
            code: error.code.clone(),
        }
    }
@ -43,15 +44,15 @@ pub enum ServerCode {
    ConnectCancel = 11,
}

-#[derive(Debug, Serialize)]
-pub struct FlowyResponse<T> {
+#[derive(Debug, Serialize, Deserialize)]
+pub struct FlowyResponse {
    pub msg: String,
-   pub data: Option<T>,
+   pub data: Bytes,
    pub code: ServerCode,
}

-impl<T: Serialize> FlowyResponse<T> {
-   pub fn new(data: Option<T>, msg: &str, code: ServerCode) -> Self {
+impl FlowyResponse {
+   pub fn new(data: Bytes, msg: &str, code: ServerCode) -> Self {
        FlowyResponse {
            msg: msg.to_owned(),
            data,
@ -59,16 +60,13 @@ impl<T: Serialize> FlowyResponse<T> {
        }
    }

-   pub fn from_data(data: T, msg: &str, code: ServerCode) -> Self {
-       Self::new(Some(data), msg, code)
-   }
-}
-
-impl FlowyResponse<String> {
-   pub fn success() -> Self { Self::from_msg("", ServerCode::Success) }
-
-   pub fn from_msg(msg: &str, code: ServerCode) -> Self {
-       Self::new(Some("".to_owned()), msg, code)
-   }
+   pub fn from<T: TryInto<Bytes, Error = protobuf::ProtobufError>>(
+       data: T,
+       msg: &str,
+       code: ServerCode,
+   ) -> Result<Self, ServerError> {
+       let bytes: Bytes = data.try_into()?;
+       Ok(Self::new(bytes, msg, code))
+   }
}
@ -90,6 +88,16 @@ impl std::convert::From<RecvError> for ServerError {
    }
}

+impl std::convert::From<serde_json::Error> for ServerError {
+   fn from(e: serde_json::Error) -> Self {
+       let msg = format!("Serial error: {:?}", e);
+       ServerError {
+           code: ServerCode::SerdeError,
+           msg,
+       }
+   }
+}
+
impl std::convert::From<reqwest::Error> for ServerError {
    fn from(error: reqwest::Error) -> Self {
        if error.is_timeout() {
@ -121,9 +129,7 @@ impl std::convert::From<reqwest::Error> for ServerError {
                code = ServerCode::ConnectCancel;
            }

-           if hyper_error.is_timeout() {
-           }
+           if hyper_error.is_timeout() {}

            ServerError { code, msg }
        },

View File

@ -1,30 +1,14 @@
use crate::response::*;
use actix_web::{body::Body, error::ResponseError, BaseHttpResponse, HttpResponse};
-use reqwest::StatusCode;
use serde::Serialize;

-impl ServerError {
-   fn http_response(&self) -> HttpResponse {
-       let resp: FlowyResponse<String> = self.into();
-       HttpResponse::Ok().json(resp)
-   }
-}
-
impl ResponseError for ServerError {
-   fn error_response(&self) -> HttpResponse { self.http_response().into() }
-}
-
-impl<T: Serialize> std::convert::Into<HttpResponse> for FlowyResponse<T> {
-   fn into(self) -> HttpResponse {
-       match serde_json::to_string(&self) {
-           Ok(body) => HttpResponse::Ok().body(Body::from(body)),
-           Err(e) => {
-               let msg = format!("Serial error: {:?}", e);
-               ServerError {
-                   code: ServerCode::SerdeError,
-                   msg,
-               }
-               .error_response()
-           },
-       }
+   fn error_response(&self) -> HttpResponse {
+       let response: FlowyResponse = self.into();
+       response.into()
    }
}
+
+impl std::convert::Into<HttpResponse> for FlowyResponse {
+   fn into(self) -> HttpResponse { HttpResponse::Ok().json(self) }
+}
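A short usage sketch of the error path these impls establish, assuming the flowy-net types above are importable and their fields stay public: a `ServerError` is first converted into a `FlowyResponse` with empty `data`, and `ResponseError::error_response` then ships that envelope as the JSON body.

```rust
use actix_web::error::ResponseError;
use flowy_net::response::{FlowyResponse, ServerCode, ServerError};

fn main() {
    let err = ServerError {
        code: ServerCode::InternalError,
        msg: "Failed to connect to Postgres".to_string(),
    };

    // Same conversion the ResponseError impl above performs internally:
    // the error becomes an envelope with empty `data`.
    let envelope: FlowyResponse = (&err).into();
    println!("{}", serde_json::to_string(&envelope).unwrap());

    // And the full path actix-web takes when a handler returns Err(err).
    let _http_response = err.error_response();
}
```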

View File

@ -1,128 +1,129 @@
This file (a hand-written serde Deserialize impl for the old generic FlowyResponse<T>) is commented out wholesale by this commit. Its previous, uncommented content was:

use crate::response::{FlowyResponse, ServerCode};
use serde::{
    de::{self, MapAccess, Visitor},
    Deserialize,
    Deserializer,
    Serialize,
};
use std::{fmt, marker::PhantomData, str::FromStr};

pub trait ServerData<'a>: Serialize + Deserialize<'a> + FromStr<Err = ()> {}

impl<'de, T: ServerData<'de>> Deserialize<'de> for FlowyResponse<T> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct ServerResponseVisitor<T>(PhantomData<fn() -> T>);

        impl<'de, T> Visitor<'de> for ServerResponseVisitor<T>
        where
            T: ServerData<'de>,
        {
            type Value = FlowyResponse<T>;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("struct Duration")
            }

            fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
            where
                V: MapAccess<'de>,
            {
                let mut msg = None;
                let mut data: Option<T> = None;
                let mut code: Option<ServerCode> = None;
                while let Some(key) = map.next_key()? {
                    match key {
                        "msg" => {
                            if msg.is_some() {
                                return Err(de::Error::duplicate_field("msg"));
                            }
                            msg = Some(map.next_value()?);
                        },
                        "code" => {
                            if code.is_some() {
                                return Err(de::Error::duplicate_field("code"));
                            }
                            code = Some(map.next_value()?);
                        },
                        "data" => {
                            if data.is_some() {
                                return Err(de::Error::duplicate_field("data"));
                            }
                            data = match MapAccess::next_value::<DeserializeWith<T>>(&mut map) {
                                Ok(wrapper) => wrapper.value,
                                Err(err) => return Err(err),
                            };
                        },
                        _ => panic!(),
                    }
                }
                let msg = msg.ok_or_else(|| de::Error::missing_field("msg"))?;
                let code = code.ok_or_else(|| de::Error::missing_field("code"))?;
                Ok(Self::Value::new(data, msg, code))
            }
        }
        const FIELDS: &'static [&'static str] = &["msg", "code", "data"];
        deserializer.deserialize_struct(
            "ServerResponse",
            FIELDS,
            ServerResponseVisitor(PhantomData),
        )
    }
}

struct DeserializeWith<'de, T: ServerData<'de>> {
    value: Option<T>,
    phantom: PhantomData<&'de ()>,
}

impl<'de, T: ServerData<'de>> Deserialize<'de> for DeserializeWith<'de, T> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(DeserializeWith {
            value: match string_or_data(deserializer) {
                Ok(val) => val,
                Err(e) => return Err(e),
            },
            phantom: PhantomData,
        })
    }
}

fn string_or_data<'de, D, T>(deserializer: D) -> Result<Option<T>, D::Error>
where
    D: Deserializer<'de>,
    T: ServerData<'de>,
{
    struct StringOrData<T>(PhantomData<fn() -> T>);

    impl<'de, T: ServerData<'de>> Visitor<'de> for StringOrData<T> {
        type Value = Option<T>;

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("string or struct impl deserialize")
        }

        fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
        where
            E: de::Error,
        {
            match FromStr::from_str(value) {
                Ok(val) => Ok(Some(val)),
                Err(_e) => Ok(None),
            }
        }

        fn visit_map<M>(self, map: M) -> Result<Self::Value, M::Error>
        where
            M: MapAccess<'de>,
        {
            match Deserialize::deserialize(de::value::MapAccessDeserializer::new(map)) {
                Ok(val) => Ok(Some(val)),
                Err(e) => Err(e),
            }
        }
    }
    deserializer.deserialize_any(StringOrData(PhantomData))
}

View File

@ -40,7 +40,7 @@ impl RustStreamSender {
    }
}

-pub fn post(observable_subject: ObservableSubject) -> Result<(), String> {
+pub fn post(_observable_subject: ObservableSubject) -> Result<(), String> {
    #[cfg(feature = "dart")]
    match R2F_STREAM_SENDER.read() {
        Ok(stream) => stream.inner_post(observable_subject),

View File

@ -1,6 +1,6 @@
use crate::{
    entities::parser::*,
-   errors::{ErrorBuilder, UserErrCode, UserError},
+   errors::{ErrorBuilder, UserError},
};
use flowy_derive::ProtoBuf;
use std::convert::TryInto;

View File

@ -3,7 +3,6 @@ use crate::{
    errors::{ErrorBuilder, UserErrCode, UserError},
};

-use bytes::Bytes;
use flowy_net::{config::SIGN_UP_URL, future::ResultFuture, request::http_post};
use std::sync::Arc;

View File

@ -5,7 +5,6 @@ pub use flowy_test::prelude::{random_valid_email, valid_password};
pub(crate) fn invalid_email_test_case() -> Vec<String> {
    // https://gist.github.com/cjaoude/fd9910626629b53c4d25
    vec![
-       "",
        "annie@",
        "annie@gmail@",
        "#@%^%#$@#$@#.com",
@ -31,7 +30,7 @@ pub(crate) fn invalid_email_test_case() -> Vec<String> {
}

pub(crate) fn invalid_password_test_case() -> Vec<String> {
-   vec!["", "123456", "1234".repeat(100).as_str()]
+   vec!["123456", "1234".repeat(100).as_str()]
        .iter()
        .map(|s| s.to_string())
        .collect::<Vec<_>>()

View File

@ -35,7 +35,7 @@ fn sign_in_with_invalid_email() {
                .sync_send()
                .error()
                .code,
-           UserErrCode::EmailInvalid
+           UserErrCode::EmailFormatInvalid
        );
    }
}
@ -56,7 +56,7 @@ fn sign_in_with_invalid_password() {
                .sync_send()
                .error()
                .code,
-           UserErrCode::PasswordInvalid
+           UserErrCode::PasswordFormatInvalid
        );
    }
}

View File

@ -35,7 +35,7 @@ fn sign_up_with_invalid_email() {
                .sync_send()
                .error()
                .code,
-           UserErrCode::EmailInvalid
+           UserErrCode::EmailFormatInvalid
        );
    }
}
@ -56,7 +56,7 @@ fn sign_up_with_invalid_password() {
                .sync_send()
                .error()
                .code,
-           UserErrCode::PasswordInvalid
+           UserErrCode::PasswordFormatInvalid
        );
    }
}

View File

@ -86,7 +86,7 @@ fn user_update_with_invalid_email() {
                .sync_send()
                .error()
                .code,
-           UserErrCode::EmailInvalid
+           UserErrCode::EmailFormatInvalid
        );
    }
}
@ -111,7 +111,7 @@ fn user_update_with_invalid_password() {
                .sync_send()
                .error()
                .code,
-           UserErrCode::PasswordInvalid
+           UserErrCode::PasswordFormatInvalid
        );
    }
}
@ -135,6 +135,6 @@ fn user_update_with_invalid_name() {
                .sync_send()
                .error()
                .code,
-           UserErrCode::UserNameInvalid
+           UserErrCode::UserIdInvalid
        );
    }

View File

@ -10,4 +10,5 @@ async fn user_register_test() {
        password: "123".to_string(),
    };
    let result = server.sign_up(params).await.unwrap();
+   println!("{:?}", result);
}

View File

@ -1,6 +1,6 @@
use bytes::Bytes;
use flowy_derive::ProtoBuf_Enum;
-use flowy_dispatch::prelude::{DispatchError, ToBytes};
+use flowy_dispatch::prelude::ToBytes;
use flowy_observable::{dart::RustStreamSender, entities::ObservableSubject};

const OBSERVABLE_CATEGORY: &'static str = "Workspace";

View File

@ -9,6 +9,7 @@ rustup component add rustfmt
cargo install cargo-expand
cargo install cargo-watch
cargo install cargo-cache
+cargo install bunyan

#protobuf code gen env
brew install protobuf@3.13