Auto-generate proto files from Rust structs

appflowy 2021-07-04 23:31:33 +08:00
parent e5ca614ceb
commit def717be59
48 changed files with 1728 additions and 59 deletions
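To orient the reader: the tooling added here scans Rust structs annotated with `#[derive(ProtoBuf)]` and `#[pb(index = N)]` attributes, emits a matching `.proto` definition via the new `flowy-tool` binary, and then relies on `protoc` plus the `flowy-derive` macros to wire the two representations together. A minimal sketch of that mapping, using the `App` type this commit adds in `flowy-user` (the `.proto` side is the generated `define/user.proto` shown further down):

```rust
// Sketch only: both halves are taken from files added later in this diff.
use flowy_derive::ProtoBuf;

#[derive(ProtoBuf, Default)]
pub struct App {
    #[pb(index = 1)]
    pub id: String,
    #[pb(index = 2)]
    pub workspace_id: String,
    #[pb(index = 3)]
    pub name: String,
}

// flowy-tool turns the struct above into the following message in
// flowy-protobuf/define/user.proto, and protoc then generates the Rust model:
//
//   message App {
//     string id = 1;
//     string workspace_id = 2;
//     string name = 3;
//   }
```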

1
.gitignore vendored

@ -8,3 +8,4 @@ Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
**/target/


@ -12,6 +12,8 @@
<sourceFolder url="file://$MODULE_DIR$/rust-lib/flowy-sdk/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/rust-lib/flowy-user/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/rust-lib/flowy-sdk/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/rust-lib/flowy-protobuf/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/scripts/flowy-tool/src" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/app_flowy/packages/af_protobuf/.pub" />
<excludeFolder url="file://$MODULE_DIR$/app_flowy/packages/af_protobuf/.dart_tool" />
<excludeFolder url="file://$MODULE_DIR$/app_flowy/packages/af_protobuf/build" />
@ -52,6 +54,7 @@
<excludeFolder url="file://$MODULE_DIR$/app_flowy/macos/Flutter/ephemeral/.symlinks/plugins/window_size/build" />
<excludeFolder url="file://$MODULE_DIR$/app_flowy/macos/Flutter/ephemeral/.symlinks/plugins/window_size/.dart_tool" />
<excludeFolder url="file://$MODULE_DIR$/app_flowy/macos/Flutter/ephemeral/.symlinks/plugins/window_size/.pub" />
<excludeFolder url="file://$MODULE_DIR$/scripts/flowy-tool/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />


@ -1,5 +1,6 @@
extend = [
{ path = "scripts/makefile/desktop.toml" },
{ path = "scripts/makefile/protobuf.toml" },
]

[env]

27
app_flowy/.vscode/tasks.json vendored Normal file

@ -0,0 +1,27 @@
{
"version": "2.0.0",
"tasks": [
{
// https://code.visualstudio.com/docs/editor/tasks
//https://gist.github.com/deadalusai/9e13e36d61ec7fb72148
// ${workspaceRoot}: the root folder of the team
// ${file}: the current opened file
// ${fileBasename}: the current opened file's basename
// ${fileDirname}: the current opened file's dirname
// ${fileExtname}: the current opened file's extension
// ${cwd}: the current working directory of the spawned process
"type": "shell",
"command": "sh ${workspaceFolder}/../scripts/build_sdk.sh",
"group": "build",
"options": {
"cwd": "${workspaceFolder}/../"
},
"problemMatcher": [
"$rustc"
],
"label": "build rust sdk"
}
]
}


@ -0,0 +1,2 @@
#[build]
#target-dir = "./bin"

1
rust-lib/.gitignore vendored

@ -9,3 +9,4 @@ Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
**/**/*.log*
bin/


@ -7,6 +7,7 @@ members = [
"flowy-user", "flowy-user",
"flowy-ast", "flowy-ast",
"flowy-derive", "flowy-derive",
"flowy-protobuf",
] ]
[profile.dev] [profile.dev]


@ -116,7 +116,7 @@ impl<'a> ASTField<'a> {
let mut bracket_inner_ty = None;
let mut bracket_ty = None;
let mut bracket_category = Some(BracketCategory::Other);
-match parse_ty(&field.ty) {
+match parse_ty(cx, &field.ty) {
Some(inner) => {
match inner.primitive_ty {
PrimitiveTy::Map(map_info) => {
@ -131,6 +131,9 @@ impl<'a> ASTField<'a> {
PrimitiveTy::Opt => {
bracket_category = Some(BracketCategory::Opt);
},
PrimitiveTy::Other => {
bracket_category = Some(BracketCategory::Other);
},
}

match *inner.bracket_ty_info {


@ -1,5 +1,4 @@
use crate::Ctxt;
use syn::{self, AngleBracketedGenericArguments, PathSegment};

#[derive(Eq, PartialEq, Debug)]
@ -7,6 +6,7 @@ pub enum PrimitiveTy {
Map(MapInfo),
Vec,
Opt,
Other,
}

#[derive(Debug)]
@ -39,7 +39,7 @@ impl<'a> TyInfo<'a> {
}
}

-pub fn parse_ty(ty: &syn::Type) -> Option<TyInfo> {
+pub fn parse_ty<'a>(ctxt: &Ctxt, ty: &'a syn::Type) -> Option<TyInfo<'a>> {
// Type -> TypePath -> Path -> PathSegment -> PathArguments ->
// AngleBracketedGenericArguments -> GenericArgument -> Type.
if let syn::Type::Path(ref p) = ty {
@ -52,19 +52,27 @@ pub fn parse_ty(ty: &syn::Type) -> Option<TyInfo> {
None => return None,
};
let is_option = seg.ident == "Option";
return if let syn::PathArguments::AngleBracketed(ref bracketed) = seg.arguments {
match seg.ident.to_string().as_ref() {
-"HashMap" => generate_hashmap_ty_info(ty, seg, bracketed),
+"HashMap" => generate_hashmap_ty_info(ctxt, ty, seg, bracketed),
-"Vec" => generate_vec_ty_info(seg, bracketed),
+"Vec" => generate_vec_ty_info(ctxt, seg, bracketed),
"Option" => generate_option_ty_info(ty, seg),
_ => {
panic!("Unsupported ty")
},
}
} else {
-assert_eq!(seg.ident.to_string(), "Option".to_string());
-generate_option_ty_info(ty, seg)
+return Some(TyInfo {
+ident: &seg.ident,
+ty,
+primitive_ty: PrimitiveTy::Other,
+bracket_ty_info: Box::new(None),
+});
};
}
ctxt.error_spanned_by(ty, format!("Unsupported inner type, get inner type fail"));
None
}
@ -83,6 +91,7 @@ fn parse_bracketed(bracketed: &AngleBracketedGenericArguments) -> Vec<&syn::Type
}

pub fn generate_hashmap_ty_info<'a>(
ctxt: &Ctxt,
ty: &'a syn::Type,
path_segment: &'a PathSegment,
bracketed: &'a AngleBracketedGenericArguments,
@ -92,14 +101,14 @@ pub fn generate_hashmap_ty_info<'a>(
return None;
}
let types = parse_bracketed(bracketed);
-let key = parse_ty(types[0]).unwrap().ident.to_string();
+let key = parse_ty(ctxt, types[0]).unwrap().ident.to_string();
-let value = parse_ty(types[1]).unwrap().ident.to_string();
+let value = parse_ty(ctxt, types[1]).unwrap().ident.to_string();
-let bracket_ty_info = Box::new(parse_ty(&types[1]));
+let bracket_ty_info = Box::new(parse_ty(ctxt, &types[1]));
return Some(TyInfo {
ident: &path_segment.ident,
ty,
primitive_ty: PrimitiveTy::Map(MapInfo::new(key, value)),
-bracket_ty_info: bracket_ty_info,
+bracket_ty_info,
});
}
@ -107,6 +116,7 @@ fn generate_option_ty_info<'a>(
ty: &'a syn::Type,
path_segment: &'a PathSegment,
) -> Option<TyInfo<'a>> {
assert_eq!(path_segment.ident.to_string(), "Option".to_string());
return Some(TyInfo {
ident: &path_segment.ident,
ty,
@ -116,6 +126,7 @@ fn generate_option_ty_info<'a>(
}

fn generate_vec_ty_info<'a>(
ctxt: &Ctxt,
path_segment: &'a PathSegment,
bracketed: &'a AngleBracketedGenericArguments,
) -> Option<TyInfo<'a>> {
@ -123,7 +134,7 @@ fn generate_vec_ty_info<'a>(
return None;
}
if let syn::GenericArgument::Type(ref bracketed_type) = bracketed.args.first().unwrap() {
-let bracketed_ty_info = Box::new(parse_ty(&bracketed_type));
+let bracketed_ty_info = Box::new(parse_ty(ctxt, &bracketed_type));
return Some(TyInfo {
ident: &path_segment.ident,
ty: bracketed_type,

2
rust-lib/flowy-derive/.gitignore vendored Normal file

@ -0,0 +1,2 @@
/target
Cargo.lock


@ -0,0 +1,25 @@
[package]
name = "flowy-derive"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
proc-macro = true
name = "flowy_derive"
[[test]]
name = "tests"
path = "tests/progress.rs"
[dependencies]
syn = { version = "1.0.60", features = ["extra-traits", "visit"] }
quote = "1.0"
proc-macro2 = "1.0"
flowy-ast = { path = "../flowy-ast" }
[dev-dependencies]
tokio = { version = "1", features = ["full"] }
trybuild = "1.0.40"
log = "0.4.11"


@ -0,0 +1,24 @@
pub enum TypeCategory {
Array,
Map,
Str,
Protobuf,
Bytes,
Enum,
Opt,
Primitive,
}
// auto generate, do not edit
pub fn category_from_str(type_str: &str) -> TypeCategory {
match type_str {
"Vec" => TypeCategory::Array,
"HashMap" => TypeCategory::Map,
"u8" => TypeCategory::Bytes,
"String" => TypeCategory::Str,
"App"
=> TypeCategory::Protobuf,
"Option" => TypeCategory::Opt,
_ => TypeCategory::Primitive,
}
}


@ -0,0 +1,3 @@
mod derive_cache;
pub use derive_cache::*;


@ -0,0 +1,35 @@
// https://docs.rs/syn/1.0.48/syn/struct.DeriveInput.html
#![feature(str_split_once)]
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};
#[macro_use]
extern crate quote;
mod derive_cache;
mod proto_buf;
// Inspired by https://serde.rs/attributes.html
#[proc_macro_derive(ProtoBuf, attributes(pb))]
pub fn derive_proto_buf(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
proto_buf::expand_derive(&input)
.unwrap_or_else(to_compile_errors)
.into()
}
#[proc_macro_derive(ProtoBuf_Enum, attributes(pb))]
pub fn derive_proto_buf_enum(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
proto_buf::expand_enum_derive(&input)
.unwrap_or_else(to_compile_errors)
.into()
}
fn to_compile_errors(errors: Vec<syn::Error>) -> proc_macro2::TokenStream {
let compile_errors = errors.iter().map(syn::Error::to_compile_error);
quote!(#(#compile_errors)*)
}


@ -0,0 +1,223 @@
use crate::{derive_cache::TypeCategory, proto_buf::util::*};
use flowy_ast::*;
use proc_macro2::{Span, TokenStream};
pub fn make_de_token_steam(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStream> {
let pb_ty = ast.attrs.pb_struct_type()?;
let struct_ident = &ast.ident;
let build_take_fields = ast
.data
.all_fields()
.filter(|f| !f.attrs.skip_deserializing())
.flat_map(|field| {
if let Some(func) = field.attrs.deserialize_with() {
let member = &field.member;
Some(quote! { o.#member=#struct_ident::#func(pb); })
} else if field.attrs.is_one_of() {
token_stream_for_one_of(ctxt, field)
} else {
token_stream_for_field(ctxt, &field.member, &field.ty, false)
}
});
let de_token_stream: TokenStream = quote! {
impl std::convert::TryFrom<&Vec<u8>> for #struct_ident {
type Error = String;
fn try_from(bytes: &Vec<u8>) -> Result<Self, Self::Error> {
let result: ::protobuf::ProtobufResult<flowy_protobuf::#pb_ty> = ::protobuf::Message::parse_from_bytes(bytes);
match result {
Ok(mut pb) => {
#struct_ident::try_from(&mut pb)
}
Err(e) => Err(format!("{:?}", e)),
}
}
}
impl std::convert::TryFrom<&mut flowy_protobuf::#pb_ty> for #struct_ident {
type Error = String;
fn try_from(pb: &mut flowy_protobuf::#pb_ty) -> Result<Self, Self::Error> {
let mut o = Self::default();
#(#build_take_fields)*
Ok(o)
}
}
}
.into();
Some(de_token_stream)
}
fn token_stream_for_one_of(ctxt: &Ctxt, field: &ASTField) -> Option<TokenStream> {
let member = &field.member;
let ident = get_member_ident(ctxt, member)?;
let ty_info = parse_ty(ctxt, &field.ty)?;
let has_func = format_ident!("has_{}", ident.to_string());
match ident_category(ty_info.ident) {
TypeCategory::Enum => {
let get_func = format_ident!("get_{}", ident.to_string());
let ty = ty_info.ty;
Some(quote! {
if pb.#has_func() {
let enum_de_from_pb = #ty::try_from(&mut pb.#get_func()).unwrap();
o.#member = Some(enum_de_from_pb);
}
})
},
TypeCategory::Primitive => {
let get_func = format_ident!("get_{}", ident.to_string());
Some(quote! {
if pb.#has_func() {
o.#member=Some(pb.#get_func());
}
})
},
_ => {
let take_func = format_ident!("take_{}", ident.to_string());
let ty = ty_info.ty;
Some(quote! {
if pb.#has_func() {
let struct_de_from_pb = #ty::try_from(&mut pb.#take_func()).unwrap();
o.#member=Some(struct_de_from_pb);
}
})
},
}
}
fn token_stream_for_field(
ctxt: &Ctxt,
member: &syn::Member,
ty: &syn::Type,
is_option: bool,
) -> Option<TokenStream> {
let ident = get_member_ident(ctxt, member)?;
let ty_info = parse_ty(ctxt, ty)?;
match ident_category(ty_info.ident) {
TypeCategory::Array => {
assert_bracket_ty_is_some(ctxt, &ty_info);
token_stream_for_vec(ctxt, &member, &ty_info.bracket_ty_info.unwrap())
},
TypeCategory::Map => {
assert_bracket_ty_is_some(ctxt, &ty_info);
token_stream_for_map(ctxt, &member, &ty_info.bracket_ty_info.unwrap())
},
TypeCategory::Protobuf => {
// if the type wrapped by SingularPtrField, should call take first
let take = syn::Ident::new("take", Span::call_site());
// inner_type_ty would be the type of the field. (e.g value of AnyData)
let ty = ty_info.ty;
Some(quote! {
let some_value = pb.#member.#take();
if some_value.is_some() {
let struct_de_from_pb = #ty::try_from(&mut some_value.unwrap()).unwrap();
o.#member = struct_de_from_pb;
}
})
},
TypeCategory::Enum => {
let ty = ty_info.ty;
Some(quote! {
let enum_de_from_pb = #ty::try_from(&mut pb.#member).unwrap();
o.#member = enum_de_from_pb;
})
},
TypeCategory::Str => {
let take_ident =
syn::Ident::new(&format!("take_{}", ident.to_string()), Span::call_site());
if is_option {
Some(quote! {
if pb.#member.is_empty() {
o.#member = None;
} else {
o.#member = Some(pb.#take_ident());
}
})
} else {
Some(quote! {
o.#member = pb.#take_ident();
})
}
},
TypeCategory::Opt => {
token_stream_for_field(ctxt, member, ty_info.bracket_ty_info.unwrap().ty, true)
},
TypeCategory::Primitive | TypeCategory::Bytes => {
// eprintln!("😄 #{:?}", &field.name().unwrap());
if is_option {
Some(quote! { o.#member = Some(pb.#member.clone()); })
} else {
Some(quote! { o.#member = pb.#member.clone(); })
}
},
}
}
fn token_stream_for_vec(
ctxt: &Ctxt,
member: &syn::Member,
bracketed_type: &TyInfo,
) -> Option<TokenStream> {
let ident = get_member_ident(ctxt, member)?;
match ident_category(bracketed_type.ident) {
TypeCategory::Protobuf => {
let ty = bracketed_type.ty;
// Deserialize from pb struct of type vec, should call take_xx(), get the
// repeated_field and then calling the into_iter。
let take_ident = format_ident!("take_{}", ident.to_string());
Some(quote! {
o.#member = pb.#take_ident()
.into_iter()
.map(|mut m| #ty::try_from(&mut m).unwrap())
.collect();
})
},
TypeCategory::Bytes => {
// Vec<u8>
Some(quote! {
o.#member = pb.#member.clone();
})
},
_ => {
// String
let take_ident = format_ident!("take_{}", ident.to_string());
Some(quote! {
o.#member = pb.#take_ident().into_vec();
})
},
}
}
fn token_stream_for_map(
ctxt: &Ctxt,
member: &syn::Member,
bracketed_type: &TyInfo,
) -> Option<TokenStream> {
let ident = get_member_ident(ctxt, member)?;
let take_ident = format_ident!("take_{}", ident.to_string());
let ty = bracketed_type.ty;
match ident_category(bracketed_type.ident) {
TypeCategory::Protobuf => Some(quote! {
let mut m: std::collections::HashMap<String, #ty> = std::collections::HashMap::new();
pb.#take_ident().into_iter().for_each(|(k,mut v)| {
m.insert(k.clone(), #ty::try_from(&mut v).unwrap());
});
o.#member = m;
}),
_ => Some(quote! {
let mut m: std::collections::HashMap<String, #ty> = std::collections::HashMap::new();
pb.#take_ident().into_iter().for_each(|(k,mut v)| {
m.insert(k.clone(), v);
});
o.#member = m;
}),
}
}
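To make the macro above concrete: for the `App` struct added later in this commit (three `String` fields, so each field takes the `TypeCategory::Str` branch and its `take_*` accessor), the code emitted by `make_de_token_steam` should look roughly like the hand-written sketch below. It assumes the `#[pb]` struct type resolves to `flowy_protobuf::App`; it is an approximation, not output copied from the macro.

```rust
// Approximate expansion of the generated deserialization impls for `App`.
impl std::convert::TryFrom<&Vec<u8>> for App {
    type Error = String;
    fn try_from(bytes: &Vec<u8>) -> Result<Self, Self::Error> {
        // Parse the raw bytes into the protoc-generated model first.
        let result: ::protobuf::ProtobufResult<flowy_protobuf::App> =
            ::protobuf::Message::parse_from_bytes(bytes);
        match result {
            Ok(mut pb) => App::try_from(&mut pb),
            Err(e) => Err(format!("{:?}", e)),
        }
    }
}

impl std::convert::TryFrom<&mut flowy_protobuf::App> for App {
    type Error = String;
    fn try_from(pb: &mut flowy_protobuf::App) -> Result<Self, Self::Error> {
        let mut o = Self::default();
        // String fields go through the TypeCategory::Str branch: o.field = pb.take_field();
        o.id = pb.take_id();
        o.workspace_id = pb.take_workspace_id();
        o.name = pb.take_name();
        Ok(o)
    }
}
```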


@ -0,0 +1,32 @@
use flowy_ast::*;
use proc_macro2::TokenStream;
#[allow(dead_code)]
pub fn make_enum_token_stream(_ctxt: &Ctxt, cont: &ASTContainer) -> Option<TokenStream> {
let enum_ident = &cont.ident;
let pb_enum = cont.attrs.pb_enum_type()?;
let build_to_pb_enum = cont.data.all_idents().map(|i| {
let token_stream: TokenStream = quote! {
#enum_ident::#i => #pb_enum::#i,
};
token_stream
});
let build_from_pb_enum = cont.data.all_idents().map(|i| {
let token_stream: TokenStream = quote! {
#pb_enum::#i => #enum_ident::#i,
};
token_stream
});
Some(quote! {
impl std::convert::TryFrom<#pb_enum> for #enum_ident {
type Error = String;
fn try_from(pb: #pb_enum) -> Result<Self, Self::Error> {
match field_type {
#(#build_from_pb_enum)*
}
}
}
})
}


@ -0,0 +1,42 @@
mod deserialize;
mod enum_serde;
mod util;
use crate::proto_buf::{deserialize::make_de_token_steam, enum_serde::make_enum_token_stream};
use flowy_ast::*;
use proc_macro2::TokenStream;
pub fn expand_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
let ctxt = Ctxt::new();
let cont = match ASTContainer::from_ast(&ctxt, input) {
Some(cont) => cont,
None => return Err(ctxt.check().unwrap_err()),
};
let mut token_stream: TokenStream = TokenStream::default();
let de_token_stream = make_de_token_steam(&ctxt, &cont);
if de_token_stream.is_some() {
token_stream.extend(de_token_stream.unwrap());
}
ctxt.check()?;
Ok(token_stream)
}
pub fn expand_enum_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
let ctxt = Ctxt::new();
let cont = match ASTContainer::from_ast(&ctxt, input) {
Some(cont) => cont,
None => return Err(ctxt.check().unwrap_err()),
};
let mut token_stream: TokenStream = TokenStream::default();
let enum_token_stream = make_enum_token_stream(&ctxt, &cont);
if enum_token_stream.is_some() {
token_stream.extend(enum_token_stream.unwrap());
}
ctxt.check()?;
Ok(token_stream)
}


@ -1,10 +0,0 @@
pub trait SerializeProtoBuf {
type ProtoBufType;
fn to_protobuf(&self) -> Self::ProtoBufType;
}
pub trait DeserializeProtoBuf {
type ProtoBufType;
type ObjectType;
fn from_protobuf(pb: &mut Self::ProtoBufType) -> Self::ObjectType;
}


@ -1,17 +1,25 @@
-pub enum TypeCategory {
-Array,
-Map,
-Str,
-Protobuf,
-Bytes,
-Enum,
-Opt,
-Primitive,
-}
-fn category_from_str(type_str: &str) -> TypeCategory { TypeCategory::Protobuf }
+use crate::derive_cache::*;
+use flowy_ast::{Ctxt, TyInfo};

pub fn ident_category(ident: &syn::Ident) -> TypeCategory {
let ident_str: &str = &ident.to_string();
category_from_str(ident_str)
}
pub(crate) fn get_member_ident<'a>(ctxt: &Ctxt, member: &'a syn::Member) -> Option<&'a syn::Ident> {
if let syn::Member::Named(ref ident) = member {
Some(ident)
} else {
ctxt.error_spanned_by(member, format!("Unsupported member, shouldn't be self.0"));
None
}
}
pub fn assert_bracket_ty_is_some(ctxt: &Ctxt, ty_info: &TyInfo) {
if ty_info.bracket_ty_info.is_none() {
ctxt.error_spanned_by(
ty_info.ty,
format!("Invalid bracketed type when gen de token steam"),
);
}
}


@ -0,0 +1,5 @@
#[tokio::test]
async fn tests() {
let t = trybuild::TestCases::new();
t.pass("tests/protobuf_enum.rs");
}


@ -0,0 +1,9 @@
[package]
name = "flowy-protobuf"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
protobuf = {version = "2.20.0"}


@ -0,0 +1,7 @@
syntax = "proto3";
message App {
string id = 1;
string workspace_id = 2;
string name = 3;
}


@ -0,0 +1,4 @@
max_width = 80
ignore = [
"**/define/*.proto",
]


@ -0,0 +1,3 @@
mod model;
pub use model::*;


@ -0,0 +1,3 @@
mod user;
pub use user::*;


@ -0,0 +1,295 @@
// This file is generated by rust-protobuf 2.22.1. Do not edit
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![cfg_attr(rustfmt, rustfmt::skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_imports)]
#![allow(unused_results)]
//! Generated file from `user.proto`
/// Generated files are compatible only with the same version
/// of protobuf runtime.
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_22_1;
#[derive(PartialEq,Clone,Default)]
pub struct App {
// message fields
pub id: ::std::string::String,
pub workspace_id: ::std::string::String,
pub name: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a App {
fn default() -> &'a App {
<App as ::protobuf::Message>::default_instance()
}
}
impl App {
pub fn new() -> App {
::std::default::Default::default()
}
// string id = 1;
pub fn get_id(&self) -> &str {
&self.id
}
pub fn clear_id(&mut self) {
self.id.clear();
}
// Param is passed by value, moved
pub fn set_id(&mut self, v: ::std::string::String) {
self.id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_id(&mut self) -> &mut ::std::string::String {
&mut self.id
}
// Take field
pub fn take_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.id, ::std::string::String::new())
}
// string workspace_id = 2;
pub fn get_workspace_id(&self) -> &str {
&self.workspace_id
}
pub fn clear_workspace_id(&mut self) {
self.workspace_id.clear();
}
// Param is passed by value, moved
pub fn set_workspace_id(&mut self, v: ::std::string::String) {
self.workspace_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_workspace_id(&mut self) -> &mut ::std::string::String {
&mut self.workspace_id
}
// Take field
pub fn take_workspace_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.workspace_id, ::std::string::String::new())
}
// string name = 3;
pub fn get_name(&self) -> &str {
&self.name
}
pub fn clear_name(&mut self) {
self.name.clear();
}
// Param is passed by value, moved
pub fn set_name(&mut self, v: ::std::string::String) {
self.name = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_name(&mut self) -> &mut ::std::string::String {
&mut self.name
}
// Take field
pub fn take_name(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.name, ::std::string::String::new())
}
}
impl ::protobuf::Message for App {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.id)?;
},
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.workspace_id)?;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.name)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.id.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.id);
}
if !self.workspace_id.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.workspace_id);
}
if !self.name.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.name);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.id.is_empty() {
os.write_string(1, &self.id)?;
}
if !self.workspace_id.is_empty() {
os.write_string(2, &self.workspace_id)?;
}
if !self.name.is_empty() {
os.write_string(3, &self.name)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> App {
App::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"id",
|m: &App| { &m.id },
|m: &mut App| { &mut m.id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"workspace_id",
|m: &App| { &m.workspace_id },
|m: &mut App| { &mut m.workspace_id },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"name",
|m: &App| { &m.name },
|m: &mut App| { &mut m.name },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<App>(
"App",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static App {
static instance: ::protobuf::rt::LazyV2<App> = ::protobuf::rt::LazyV2::INIT;
instance.get(App::new)
}
}
impl ::protobuf::Clear for App {
fn clear(&mut self) {
self.id.clear();
self.workspace_id.clear();
self.name.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for App {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for App {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n\nuser.proto\"L\n\x03App\x12\x0e\n\x02id\x18\x01\x20\x01(\tR\x02id\x12\
!\n\x0cworkspace_id\x18\x02\x20\x01(\tR\x0bworkspaceId\x12\x12\n\x04name\
\x18\x03\x20\x01(\tR\x04nameJ\xcf\x01\n\x06\x12\x04\0\0\x06\x01\n\x08\n\
\x01\x0c\x12\x03\0\0\x12\n\n\n\x02\x04\0\x12\x04\x02\0\x06\x01\n\n\n\x03\
\x04\0\x01\x12\x03\x02\x08\x0b\n\x0b\n\x04\x04\0\x02\0\x12\x03\x03\x04\
\x12\n\x0c\n\x05\x04\0\x02\0\x05\x12\x03\x03\x04\n\n\x0c\n\x05\x04\0\x02\
\0\x01\x12\x03\x03\x0b\r\n\x0c\n\x05\x04\0\x02\0\x03\x12\x03\x03\x10\x11\
\n\x0b\n\x04\x04\0\x02\x01\x12\x03\x04\x04\x1c\n\x0c\n\x05\x04\0\x02\x01\
\x05\x12\x03\x04\x04\n\n\x0c\n\x05\x04\0\x02\x01\x01\x12\x03\x04\x0b\x17\
\n\x0c\n\x05\x04\0\x02\x01\x03\x12\x03\x04\x1a\x1b\n\x0b\n\x04\x04\0\x02\
\x02\x12\x03\x05\x04\x14\n\x0c\n\x05\x04\0\x02\x02\x05\x12\x03\x05\x04\n\
\n\x0c\n\x05\x04\0\x02\x02\x01\x12\x03\x05\x0b\x0f\n\x0c\n\x05\x04\0\x02\
\x02\x03\x12\x03\x05\x12\x13b\x06proto3\
";
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
file_descriptor_proto_lazy.get(|| {
parse_descriptor_proto()
})
}


@ -116,29 +116,6 @@ impl<T> ops::DerefMut for Data<T> {
fn deref_mut(&mut self) -> &mut T { &mut self.0 }
}
// #[cfg(feature = "use_serde")]
// impl<T> FromRequest for Data<T>
// where
// T: serde::de::DeserializeOwned + 'static,
// {
// type Error = SystemError;
// type Future = Ready<Result<Self, SystemError>>;
//
// #[inline]
// fn from_request(req: &EventRequest, payload: &mut Payload) ->
// Self::Future { match payload {
// Payload::None => ready(Err(unexpected_none_payload(req))),
// Payload::Bytes(bytes) => {
// let s = String::from_utf8_lossy(bytes);
// match serde_json::from_str(s.as_ref()) {
// Ok(data) => ready(Ok(Data(data))),
// Err(e) => ready(Err(InternalError::new(format!("{:?}",
// e)).into())), }
// },
// }
// }
// }
pub trait FromBytes: Sized {
fn parse_from_bytes(bytes: &Vec<u8>) -> Result<Self, SystemError>;
}


@ -9,6 +9,8 @@ edition = "2018"
derive_more = {version = "0.99", features = ["display"]}
flowy-sys = { path = "../flowy-sys" }
flowy-log = { path = "../flowy-log" }
flowy-derive = { path = "../flowy-derive" }
flowy-protobuf = { path = "../flowy-protobuf" }
tracing = { version = "0.1", features = ["log"] }
bytes = "1.0"
serde = { version = "1.0", features = ["derive"] }
@ -16,6 +18,7 @@ validator = "0.12.0"
rand = { version = "0.8", features=["std_rng"] }
unicode-segmentation = "1.7.1"
log = "0.4.14"
protobuf = {version = "2.18.0"}

[dev-dependencies]
quickcheck = "0.9.2"


@ -1,4 +1,5 @@
use crate::domain::{user_email::UserEmail, user_name::UserName};
use flowy_derive::ProtoBuf;

pub struct User {
name: UserName,
@ -8,3 +9,15 @@ pub struct User {
impl User {
pub fn new(name: UserName, email: UserEmail) -> Self { Self { name, email } }
}
#[derive(ProtoBuf, Default)]
pub struct App {
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub workspace_id: String, // equal to #[belongs_to(Workspace, foreign_key = "workspace_id")].
#[pb(index = 3)]
pub name: String,
}
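With the derive in place, callers can recover an `App` from protobuf-encoded bytes through the `TryFrom<&Vec<u8>>` impl generated by `flowy-derive`; a hypothetical usage sketch (the helper name is invented for illustration):

```rust
use std::convert::TryFrom;

// Hypothetical helper: `bytes` would arrive as a protobuf-encoded App message,
// e.g. from the Flutter side of the event system.
fn decode_app(bytes: &Vec<u8>) -> Result<App, String> {
    App::try_from(bytes)
}
```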

10
scripts/flowy-tool/.gitignore vendored Normal file

@ -0,0 +1,10 @@
# Generated by Cargo
# will have compiled files and executables
/target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk


@ -0,0 +1,22 @@
[package]
name = "flowy-tool"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
clap = "2.33.3"
walkdir = "2.3.1"
syn = { version = "1.0.60", features = ["extra-traits", "parsing", "derive", "full"]}
tera = { version = "1.5.0" }
log = "0.4.11"
env_logger = "0.8.2"
shell = { git="https://github.com/google/rust-shell.git"}
flowy-ast = { path = "../../rust-lib/flowy-ast" }
console = "0.14.0"
fancy-regex = "0.5.0"
lazy_static = "1.4.0"
phf = { version = "0.8.0", features = ["macros"] }
similar = "1.2.2"
dialoguer = "0.8.0"


@ -0,0 +1,68 @@
mod proto;
mod util;
use clap::{App, Arg};
fn main() {
std::env::set_var("RUST_LOG", "Debug");
env_logger::init();
let matches = app().get_matches();
if let Some(ref matches) = matches.subcommand_matches("pb-gen") {
let rust_source = matches.value_of("rust_source").unwrap();
let build_cache = matches.value_of("build_cache").unwrap();
let rust_mod_dir = matches.value_of("rust_mod_dir").unwrap();
let flutter_mod_dir = matches.value_of("flutter_mod_dir").unwrap();
let proto_file_output = matches.value_of("proto_file_output").unwrap();
proto::ProtoGen::new()
.set_rust_source_dir(rust_source)
.set_build_cache_dir(build_cache)
.set_rust_mod_dir(rust_mod_dir)
.set_flutter_mod_dir(flutter_mod_dir)
.set_proto_file_output_dir(proto_file_output)
.gen();
}
}
pub fn app<'a, 'b>() -> App<'a, 'b> {
let app = App::new("flowy-tool")
.version("0.1")
.author("nathan")
.about("flowy tool")
.subcommand(
App::new("pb-gen")
.about("Generate proto file from rust code")
.arg(
Arg::with_name("rust_source")
.long("rust_source")
.value_name("DIRECTORY")
.help("The directory to the rust code"),
)
.arg(
Arg::with_name("build_cache")
.long("build_cache")
.value_name("PATH")
.help("Caching information used by flowy-derive"),
)
.arg(
Arg::with_name("rust_mod_dir")
.long("rust_mod_dir")
.value_name("DIRECTORY"),
)
.arg(
Arg::with_name("flutter_mod_dir")
.long("flutter_mod_dir")
.value_name("DIRECTORY"),
)
.arg(
Arg::with_name("proto_file_output")
.long("proto_file_output")
.value_name("DIRECTORY")
.help("The path is used to save the generated proto file"),
),
);
app
}


@ -0,0 +1,190 @@
use crate::proto::helper::*;
use crate::proto::template::{EnumTemplate, StructTemplate};
use crate::util::*;
use flowy_ast::*;
use syn::Item;
use walkdir::WalkDir;
pub fn parse_crate_protobuf(root: &str, proto_output_dir: &str) -> Vec<CrateProtoInfo> {
log::info!("Generate proto file from {}", root);
let domains_info = get_crate_domain_directory(root);
domains_info
.iter()
.map(|domain| {
let files = parse_files_protobuf(&domain.path, proto_output_dir);
CrateProtoInfo::new(&domain, files)
})
.collect::<Vec<CrateProtoInfo>>()
}
fn parse_files_protobuf(root: &str, proto_output_dir: &str) -> Vec<FileProtoInfo> {
let mut gen_proto_vec: Vec<FileProtoInfo> = vec![];
// file_stem https://doc.rust-lang.org/std/path/struct.Path.html#method.file_stem
for (path, file_name) in WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_hidden(e))
.filter_map(|e| e.ok())
.filter(|e| e.file_type().is_dir() == false)
.map(|e| {
let path = e.path().to_str().unwrap().to_string();
let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
(path, file_name)
})
{
if file_name == "mod" {
continue;
}
// https://docs.rs/syn/1.0.54/syn/struct.File.html
let ast =
syn::parse_file(read_file(&path).unwrap().as_ref()).expect("Unable to parse file");
let structs = get_ast_structs(&ast);
// println!("😁 {} - {}", path, file_name);
let proto_file_path = format!("{}/{}.proto", proto_output_dir, &file_name);
let mut proto_file_content = parse_or_init_proto_file(proto_file_path.as_ref());
structs.iter().for_each(|s| {
let mut struct_template = StructTemplate::new();
struct_template.set_message_struct_name(&s.name);
s.fields
.iter()
.filter(|f| f.attrs.pb_index().is_some())
.for_each(|f| {
struct_template.set_field(&f);
});
let s = struct_template.render().unwrap();
proto_file_content.push_str(s.as_ref());
proto_file_content.push_str("\n");
});
let enums = get_ast_enums(&ast);
enums.iter().for_each(|e| {
let mut enum_template = EnumTemplate::new();
enum_template.set_message_enum(&e);
let s = enum_template.render().unwrap();
proto_file_content.push_str(s.as_ref());
proto_file_content.push_str("\n");
});
if !enums.is_empty() || !structs.is_empty() {
let info = FileProtoInfo {
file_name: file_name.clone(),
structs: structs.iter().map(|s| s.name.clone()).collect(),
enums: enums.iter().map(|e| e.name.clone()).collect(),
generated_content: proto_file_content.clone(),
};
gen_proto_vec.push(info);
}
}
gen_proto_vec
}
pub fn parse_or_init_proto_file(path: &str) -> String {
let mut proto_file_content = String::new();
let imported_content = find_proto_file_import(path);
proto_file_content.push_str(imported_content.as_ref());
proto_file_content.push_str("\n");
proto_file_content
}
pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
// let mut content = format!("{:#?}", &ast);
// let mut file = File::create("./foo.txt").unwrap();
// file.write_all(content.as_bytes()).unwrap();
let ctxt = Ctxt::new();
let mut proto_structs: Vec<Struct> = vec![];
ast.items.iter().for_each(|item| match item {
Item::Struct(item_struct) => {
let (_, fields) = struct_from_ast(&ctxt, &item_struct.fields);
if fields
.iter()
.filter(|f| f.attrs.pb_index().is_some())
.count()
> 0
{
proto_structs.push(Struct {
name: item_struct.ident.to_string(),
fields,
});
}
}
_ => {}
});
ctxt.check().unwrap();
proto_structs
}
pub fn get_ast_enums(ast: &syn::File) -> Vec<FlowyEnum> {
let mut flowy_enums: Vec<FlowyEnum> = vec![];
let ctxt = Ctxt::new();
ast.items.iter().for_each(|item| {
// https://docs.rs/syn/1.0.54/syn/enum.Item.html
match item {
Item::Enum(item_enum) => {
flowy_enums.push(FlowyEnum {
name: item_enum.ident.to_string(),
attrs: flowy_ast::enum_from_ast(&ctxt, &item_enum.variants),
});
}
_ => {}
}
});
ctxt.check().unwrap();
flowy_enums
}
pub struct FlowyEnum<'a> {
pub name: String,
pub attrs: Vec<ASTEnumVariant<'a>>,
}
pub struct Struct<'a> {
pub name: String,
pub fields: Vec<ASTField<'a>>,
}
use fancy_regex::Regex;
use lazy_static::lazy_static;
use std::{fs::File, io::Read, path::Path};
lazy_static! {
static ref SYNTAX_REGEX: Regex = Regex::new("syntax.*;").unwrap();
static ref IMPORT_REGEX: Regex = Regex::new("(import\\s).*;").unwrap();
}
fn find_proto_file_import(path: &str) -> String {
let mut result = String::new();
if !Path::new(path).exists() {
// log::error!("{} not exist", path);
result = String::from("syntax = \"proto3\";");
return result;
}
let mut file = File::open(path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
content.lines().for_each(|line| {
////Result<Option<Match<'t>>>
if let Ok(some_line) = SYNTAX_REGEX.find(line) {
if let Some(m) = some_line {
result.push_str(m.as_str());
result.push_str("\n");
}
}
if let Ok(some_line) = IMPORT_REGEX.find(line) {
if let Some(m) = some_line {
result.push_str(m.as_str());
result.push_str("\n");
}
}
});
result
}


@ -0,0 +1,71 @@
use walkdir::WalkDir;
pub struct CrateInfo {
pub name: String,
pub path: String,
}
pub struct CrateProtoInfo {
pub files: Vec<FileProtoInfo>,
pub name: String,
pub path: String,
}
impl CrateProtoInfo {
pub fn new(info: &CrateInfo, files: Vec<FileProtoInfo>) -> Self {
Self {
files,
name: info.name.to_owned(),
path: info.path.to_owned(),
}
}
}
#[derive(Debug)]
pub struct FileProtoInfo {
pub file_name: String,
pub structs: Vec<String>,
pub enums: Vec<String>,
pub generated_content: String,
}
pub fn get_crate_domain_directory(root: &str) -> Vec<CrateInfo> {
WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_hidden(e))
.filter_map(|e| e.ok())
.filter(|e| is_domain_dir(e))
.map(|e| CrateInfo {
//TODO: get the crate name from toml file
name: e
.path()
.parent()
.unwrap()
.parent()
.unwrap()
.file_stem()
.unwrap()
.to_str()
.unwrap()
.to_string(),
path: e.path().to_str().unwrap().to_string(),
})
.collect::<Vec<CrateInfo>>()
}
pub fn is_domain_dir(e: &walkdir::DirEntry) -> bool {
let domain = e.path().file_stem().unwrap().to_str().unwrap().to_string();
if e.file_type().is_dir() && domain == "domain".to_string() {
true
} else {
false
}
}
pub fn is_hidden(entry: &walkdir::DirEntry) -> bool {
entry
.file_name()
.to_str()
.map(|s| s.starts_with("."))
.unwrap_or(false)
}


@ -0,0 +1,6 @@
mod ast;
mod helper;
mod proto_gen;
mod template;
pub use proto_gen::*;


@ -0,0 +1,176 @@
use crate::proto::ast::*;
use crate::proto::helper::*;
use crate::{proto::template::*, util::*};
use flowy_ast::*;
use std::{fs::OpenOptions, io::Write};
use syn::Item;
use walkdir::WalkDir;
pub struct ProtoGen {
rust_source_dir: Option<String>,
proto_file_output_dir: Option<String>,
rust_mod_dir: Option<String>,
flutter_mod_dir: Option<String>,
build_cache_dir: Option<String>,
}
impl ProtoGen {
pub fn new() -> Self {
ProtoGen {
rust_source_dir: None,
proto_file_output_dir: None,
rust_mod_dir: None,
flutter_mod_dir: None,
build_cache_dir: None,
}
}
pub fn set_rust_source_dir(mut self, dir: &str) -> Self {
self.rust_source_dir = Some(dir.to_string());
self
}
pub fn set_proto_file_output_dir(mut self, dir: &str) -> Self {
self.proto_file_output_dir = Some(dir.to_string());
self
}
pub fn set_rust_mod_dir(mut self, dir: &str) -> Self {
self.rust_mod_dir = Some(dir.to_string());
self
}
pub fn set_flutter_mod_dir(mut self, dir: &str) -> Self {
self.flutter_mod_dir = Some(dir.to_string());
self
}
pub fn set_build_cache_dir(mut self, build_cache_dir: &str) -> Self {
self.build_cache_dir = Some(build_cache_dir.to_string());
self
}
pub fn gen(&self) {
let infos = parse_crate_protobuf(
self.rust_source_dir.as_ref().unwrap().as_ref(),
self.proto_file_output_dir.as_ref().unwrap().as_ref(),
);
self.write_proto_files(&infos);
self.gen_derive(&infos);
self.update_rust_flowy_protobuf_mod_file(&infos);
}
fn gen_derive(&self, crate_infos: &Vec<CrateProtoInfo>) {
let file_proto_infos = crate_infos
.iter()
.map(|ref crate_info| &crate_info.files)
.flatten()
.collect::<Vec<&FileProtoInfo>>();
let structs: Vec<String> = file_proto_infos
.iter()
.map(|info| info.structs.clone())
.flatten()
.collect();
let enums: Vec<String> = file_proto_infos
.iter()
.map(|info| info.enums.clone())
.flatten()
.collect();
let derive_file = self.build_cache_dir.as_ref().unwrap().clone();
let mut derive_template = ProtobufDeriveCache::new(structs, enums);
let new_content = derive_template.render().unwrap();
let old_content = read_file(derive_file.as_ref()).unwrap();
if new_content.clone() == old_content {
return;
}
// println!("{}", diff_lines(&old_content, &new_content));
match OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&derive_file)
{
Ok(ref mut file) => {
file.write_all(new_content.as_bytes()).unwrap();
}
Err(err) => {
panic!("Failed to open log file: {}", err);
}
}
}
fn write_proto_files(&self, crate_infos: &Vec<CrateProtoInfo>) {
for crate_info in crate_infos {
crate_info.files.iter().for_each(|info| {
// let dir = format!(
// "{}/{}",
// self.proto_file_output_dir.as_ref().unwrap(),
// &crate_info.name,
// );
let dir = format!("{}", self.proto_file_output_dir.as_ref().unwrap(),);
if !std::path::Path::new(&dir).exists() {
std::fs::create_dir_all(&dir).unwrap();
}
let proto_file_path = format!("{}/{}.proto", dir, &info.file_name);
let new_content = info.generated_content.clone();
save_content_to_file_with_diff_prompt(
&new_content,
proto_file_path.as_ref(),
false,
);
});
}
}
fn update_rust_flowy_protobuf_mod_file(&self, crate_infos: &Vec<CrateProtoInfo>) {
for crate_info in crate_infos {
// let dir = format!(
// "{}/{}-pb",
// self.rust_mod_dir.as_ref().unwrap(),
// &crate_info.name,
// );
let dir = format!("{}/model", self.rust_mod_dir.as_ref().unwrap(),);
if !std::path::Path::new(&dir).exists() {
std::fs::create_dir_all(&dir).unwrap();
}
let mod_path = format!("{}/mod.rs", dir);
match OpenOptions::new()
.create(false)
.write(true)
.append(false)
.truncate(true)
.open(&mod_path)
{
Ok(ref mut file) => {
let mut mod_file_content = String::new();
for (_, file_name) in
WalkDir::new(self.proto_file_output_dir.as_ref().unwrap().clone())
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| e.file_type().is_dir() == false)
.map(|e| {
(
e.path().to_str().unwrap().to_string(),
e.path().file_stem().unwrap().to_str().unwrap().to_string(),
)
})
{
let c = format!("\nmod {}; \npub use {}::*; \n", &file_name, &file_name);
mod_file_content.push_str(c.as_ref());
}
file.write_all(mod_file_content.as_bytes()).unwrap();
}
Err(err) => {
panic!("Failed to open file: {}", err);
}
}
}
}
}


@ -0,0 +1,33 @@
use crate::util::get_tera;
use tera::{Context, Tera};
pub struct ProtobufDeriveCache {
context: Context,
structs: Vec<String>,
enums: Vec<String>,
}
#[allow(dead_code)]
impl ProtobufDeriveCache {
pub fn new(structs: Vec<String>, enums: Vec<String>) -> Self {
return ProtobufDeriveCache {
context: Context::new(),
structs,
enums,
};
}
pub fn render(&mut self) -> Option<String> {
self.context.insert("names", &self.structs);
self.context.insert("enums", &self.enums);
let tera = get_tera("build_cache");
match tera.render("derive_cache.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
}
}
}
}


@ -0,0 +1,44 @@
pub enum TypeCategory {
Array,
Map,
Str,
Protobuf,
Bytes,
Enum,
Opt,
Primitive,
}
// auto generate, do not edit
pub fn category_from_str(type_str: &str) -> TypeCategory {
match type_str {
"Vec" => TypeCategory::Array,
"HashMap" => TypeCategory::Map,
"u8" => TypeCategory::Bytes,
"String" => TypeCategory::Str,
{%- for name in names -%}
{%- if loop.first %}
"{{ name }}"
{%- else %}
| "{{ name }}"
{%- endif -%}
{%- if loop.last %}
=> TypeCategory::Protobuf,
{%- endif %}
{%- endfor %}
{%- for enum in enums -%}
{%- if loop.first %}
"{{ enum }}"
{%- else %}
| "{{ enum }}"
{%- endif -%}
{%- if loop.last %}
=> TypeCategory::Enum,
{%- endif %}
{%- endfor %}
"Option" => TypeCategory::Opt,
_ => TypeCategory::Primitive,
}
}


@ -0,0 +1,3 @@
mod derive_cache;
pub use derive_cache::*;


@ -0,0 +1,5 @@
mod build_cache;
mod proto_file;
pub use build_cache::*;
pub use proto_file::*;


@ -0,0 +1,5 @@
enum {{ enum_name }} {
{%- for item in items %}
{{ item }}
{%- endfor %}
}


@ -0,0 +1,38 @@
use crate::proto::ast::FlowyEnum;
use crate::util::get_tera;
use tera::{Context, Tera};
pub struct EnumTemplate {
context: Context,
items: Vec<String>,
}
#[allow(dead_code)]
impl EnumTemplate {
pub fn new() -> Self {
return EnumTemplate {
context: Context::new(),
items: vec![],
};
}
pub fn set_message_enum(&mut self, flowy_enum: &FlowyEnum) {
self.context.insert("enum_name", &flowy_enum.name);
flowy_enum.attrs.iter().for_each(|item| {
self.items
.push(format!("{} = {};", item.attrs.name, item.attrs.value))
})
}
pub fn render(&mut self) -> Option<String> {
self.context.insert("items", &self.items);
let tera = get_tera("proto_file");
match tera.render("enum.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
}
}
}
}


@ -0,0 +1,5 @@
mod enum_template;
mod struct_template;
pub use enum_template::*;
pub use struct_template::*;


@ -0,0 +1,5 @@
message {{ struct_name }} {
{%- for field in fields %}
{{ field }}
{%- endfor %}
}


@ -0,0 +1,96 @@
use crate::util::get_tera;
use flowy_ast::*;
use phf::phf_map;
use tera::{Context, Tera};
// Protobuf data type : https://developers.google.com/protocol-buffers/docs/proto3
static RUST_TYPE_MAP: phf::Map<&'static str, &'static str> = phf_map! {
"String" => "string",
"i64" => "int64",
"i32" => "int32",
"u64" => "uint64",
"u32" => "uint32",
"Vec" => "repeated",
"f64" => "double",
"HashMap" => "map",
};
pub struct StructTemplate {
context: Context,
fields: Vec<String>,
}
#[allow(dead_code)]
impl StructTemplate {
pub fn new() -> Self {
return StructTemplate {
context: Context::new(),
fields: vec![],
};
}
pub fn set_message_struct_name(&mut self, name: &str) {
self.context.insert("struct_name", name);
}
pub fn set_field(&mut self, field: &ASTField) {
// {{ field_type }} {{ field_name }} = {{index}};
let name = field.name().unwrap().to_string();
let index = field.attrs.pb_index().unwrap();
let ty: &str = &field.ty_as_str();
let mut mapped_ty: &str = ty;
if RUST_TYPE_MAP.contains_key(ty) {
mapped_ty = RUST_TYPE_MAP[ty];
}
match field.bracket_category {
Some(ref category) => match category {
BracketCategory::Opt => self.fields.push(format!(
"oneof one_of_{} {{ {} {} = {}; }};",
name, mapped_ty, name, index
)),
BracketCategory::Map((k, v)) => {
let key: &str = k;
let value: &str = v;
self.fields.push(format!(
// map<string, string> attrs = 1;
"map<{}, {}> {} = {};",
RUST_TYPE_MAP.get(key).unwrap_or(&key),
RUST_TYPE_MAP.get(value).unwrap_or(&value),
name,
index
));
}
BracketCategory::Vec => {
let bracket_ty: &str = &field.bracket_ty.as_ref().unwrap().to_string();
if mapped_ty == "u8" && bracket_ty == "Vec" {
self.fields.push(format!("bytes {} = {};", name, index))
} else {
self.fields.push(format!(
"{} {} {} = {};",
RUST_TYPE_MAP[bracket_ty], mapped_ty, name, index
))
}
}
BracketCategory::Other => self
.fields
.push(format!("{} {} = {};", mapped_ty, name, index)),
},
None => {}
}
}
pub fn render(&mut self) -> Option<String> {
self.context.insert("fields", &self.fields);
let tera = get_tera("proto_file");
match tera.render("struct.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
}
}
}
}


@ -0,0 +1,107 @@
use console::Style;
use dialoguer::Confirm;
use similar::{ChangeTag, TextDiff};
use std::{
fs::{File, OpenOptions},
io::{Read, Write},
path::Path,
};
use tera::Tera;
pub fn read_file(path: &str) -> Option<String> {
let mut file = File::open(path).expect("Unable to open file");
let mut content = String::new();
match file.read_to_string(&mut content) {
Ok(_) => Some(content),
Err(e) => {
log::error!("{}, with error: {:?}", path, e);
Some("".to_string())
}
}
}
pub fn save_content_to_file_with_diff_prompt(content: &str, output_file: &str, force_write: bool) {
if Path::new(output_file).exists() {
let old_content = read_file(output_file).unwrap();
let new_content = content.to_owned();
let write_to_file = || match OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(output_file)
{
Ok(ref mut file) => {
file.write_all(new_content.as_bytes()).unwrap();
}
Err(err) => {
panic!("Failed to open log file: {}", err);
}
};
if new_content != old_content {
print_diff(old_content.clone(), new_content.clone());
if force_write {
write_to_file()
} else {
if Confirm::new().with_prompt("Override?").interact().unwrap() {
write_to_file()
} else {
log::info!("never mind then :(");
}
}
}
} else {
match OpenOptions::new()
.create(true)
.write(true)
.open(output_file)
{
Ok(ref mut file) => file.write_all(content.as_bytes()).unwrap(),
Err(err) => panic!("Open or create file fail: {}", err),
}
}
}
pub fn print_diff(old_content: String, new_content: String) {
let diff = TextDiff::from_lines(&old_content, &new_content);
for op in diff.ops() {
for change in diff.iter_changes(op) {
let (sign, style) = match change.tag() {
ChangeTag::Delete => ("-", Style::new().red()),
ChangeTag::Insert => ("+", Style::new().green()),
ChangeTag::Equal => (" ", Style::new()),
};
match change.tag() {
ChangeTag::Delete => {
print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
}
ChangeTag::Insert => {
print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
}
ChangeTag::Equal => {}
};
}
println!("---------------------------------------------------");
}
}
pub fn get_tera(directory: &str) -> Tera {
let mut root = "./scripts/flowy-tool/src/proto/template/".to_owned();
root.push_str(directory);
let root_absolute_path = std::fs::canonicalize(root)
.unwrap()
.as_path()
.display()
.to_string();
let template_path = format!("{}/**/*.tera", root_absolute_path);
match Tera::new(template_path.as_ref()) {
Ok(t) => t,
Err(e) => {
log::error!("Parsing error(s): {}", e);
::std::process::exit(1);
}
}
}


@ -0,0 +1,3 @@
mod file;
pub use file::*;


@ -0,0 +1,29 @@
[tasks.pb]
dependencies = ["gen_pb_file", "gen_rust_pb"]
[tasks.gen_pb_file]
script = [
"""
pb_gen_bin=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/scripts/flowy-tool/Cargo.toml
rust_source=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/
build_cache=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/flowy-derive/src/derive_cache/derive_cache.rs
proto_file_output=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/flowy-protobuf/define
rust_mod_dir=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/flowy-protobuf/src/
flutter_mod_dir=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/flutter-lib/packages/flowy_protobuf/lib/
cargo run --manifest-path ${pb_gen_bin} pb-gen --rust_source=${rust_source} --build_cache=${build_cache} --proto_file_output=${proto_file_output} --rust_mod_dir=${rust_mod_dir} --flutter_mod_dir=${flutter_mod_dir}
""",
]
script_runner = "@shell"
[tasks.gen_rust_pb]
script = [
"""
protoc --rust_out=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/flowy-protobuf/src/model \
--proto_path=${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/flowy-protobuf/define \
${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/rust-lib/flowy-protobuf/define/*.proto
""",
]
script_runner = "@shell"