refactor: crates (#4258)

* chore: rename flowy-folder2 to flowy-folder

* chore: rename flowy-document2 to flowy-document

* chore: fix test

* chore: move lib-infra crate

* chore: remove shared-lib

* chore: fix clippy
This commit is contained in:
Nathan.fooo
2023-12-31 07:29:40 +08:00
committed by GitHub
parent 2f6a4f8c7b
commit 5facb61e23
460 changed files with 498 additions and 11141 deletions

View File

@ -0,0 +1,11 @@
[package]
name = "flowy-ast"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
syn = { version = "1.0.109", features = ["extra-traits", "parsing", "derive", "full"]}
quote = "1.0"
proc-macro2 = "1.0"

View File

@ -0,0 +1,299 @@
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![allow(unused_assignments)]
use crate::event_attrs::EventEnumAttrs;
use crate::node_attrs::NodeStructAttrs;
use crate::{
is_recognizable_field, ty_ext::*, ASTResult, PBAttrsContainer, PBStructAttrs, NODE_TYPE,
};
use proc_macro2::Ident;
use syn::Meta::NameValue;
use syn::{self, punctuated::Punctuated};
/// Parsed view of a `#[derive(...)]` input: the item's name, its container
/// attributes and its fields/variants.
pub struct ASTContainer<'a> {
  /// The struct or enum name (without generics).
  pub ident: syn::Ident,
  /// Value of the container-level `#[node_type = "..."]` attribute, if any.
  pub node_type: Option<String>,
  /// Attributes on the structure.
  pub pb_attrs: PBAttrsContainer,
  /// The contents of the struct or enum.
  pub data: ASTData<'a>,
}
impl<'a> ASTContainer<'a> {
  /// Builds an `ASTContainer` from a `syn::DeriveInput`.
  ///
  /// Returns `None` for unions (an error is recorded on `ast_result`);
  /// structs and enums always produce a container.
  pub fn from_ast(ast_result: &ASTResult, ast: &'a syn::DeriveInput) -> Option<ASTContainer<'a>> {
    let attrs = PBAttrsContainer::from_ast(ast_result, ast);
    // syn::DeriveInput
    //  1. syn::DataUnion
    //  2. syn::DataStruct
    //  3. syn::DataEnum
    let data = match &ast.data {
      syn::Data::Struct(data) => {
        // https://docs.rs/syn/1.0.48/syn/struct.DataStruct.html
        let (style, fields) = struct_from_ast(ast_result, &data.fields);
        ASTData::Struct(style, fields)
      },
      syn::Data::Union(_) => {
        ast_result.error_spanned_by(ast, "Does not support derive for unions");
        return None;
      },
      syn::Data::Enum(data) => {
        // https://docs.rs/syn/1.0.48/syn/struct.DataEnum.html
        ASTData::Enum(enum_from_ast(
          ast_result,
          &ast.ident,
          &data.variants,
          &ast.attrs,
        ))
      },
    };
    let ident = ast.ident.clone();
    // Extract the optional container-level `#[node_type = "..."]` attribute.
    let node_type = get_node_type(ast_result, &ident, &ast.attrs);
    let item = ASTContainer {
      ident,
      pb_attrs: attrs,
      node_type,
      data,
    };
    Some(item)
  }
}
/// The body of the derive input: either a struct's fields or an enum's variants.
pub enum ASTData<'a> {
  Struct(ASTStyle, Vec<ASTField<'a>>),
  Enum(Vec<ASTEnumVariant<'a>>),
}
impl<'a> ASTData<'a> {
  /// Iterates over every field, flattening enum variants' fields into one stream.
  pub fn all_fields(&'a self) -> Box<dyn Iterator<Item = &'a ASTField<'a>> + 'a> {
    match self {
      ASTData::Enum(variants) => {
        Box::new(variants.iter().flat_map(|variant| variant.fields.iter()))
      },
      ASTData::Struct(_, fields) => Box::new(fields.iter()),
    }
  }
  /// Iterates over the event attributes of each enum variant.
  /// Structs have no variants, so the iterator is empty for them.
  pub fn all_variants(&'a self) -> Box<dyn Iterator<Item = &'a EventEnumAttrs> + 'a> {
    match self {
      ASTData::Enum(variants) => {
        let iter = variants.iter().map(|variant| &variant.attrs);
        Box::new(iter)
      },
      // An explicit empty iterator replaces the previous `flat_map(|_| None)`
      // no-op, which produced the same empty stream but obscured the intent.
      ASTData::Struct(..) => Box::new(std::iter::empty()),
    }
  }
  /// Iterates over identifiers: variant names for enums, named-field idents
  /// for structs (unnamed/tuple members are skipped).
  pub fn all_idents(&'a self) -> Box<dyn Iterator<Item = &'a syn::Ident> + 'a> {
    match self {
      ASTData::Enum(variants) => Box::new(variants.iter().map(|v| &v.ident)),
      ASTData::Struct(_, fields) => {
        // `filter_map` over an Option is the idiomatic form of the old
        // `flat_map` here.
        let iter = fields.iter().filter_map(|f| match &f.member {
          syn::Member::Named(ident) => Some(ident),
          _ => None,
        });
        Box::new(iter)
      },
    }
  }
}
/// A variant of an enum.
pub struct ASTEnumVariant<'a> {
  /// The variant name.
  pub ident: syn::Ident,
  /// Event metadata parsed from `#[event(...)]` / `#[event_err = "..."]`.
  pub attrs: EventEnumAttrs,
  /// Shape of the variant's payload (unit / newtype / tuple / struct).
  pub style: ASTStyle,
  /// Parsed fields carried by the variant.
  pub fields: Vec<ASTField<'a>>,
  /// The untouched `syn` variant this was built from.
  pub original: &'a syn::Variant,
}
impl<'a> ASTEnumVariant<'a> {
  /// The variant's identifier rendered as an owned `String`.
  pub fn name(&self) -> String {
    self.ident.to_string()
  }
}
/// Kind of generic container a field's type uses, e.g. `Vec<T>` or
/// `HashMap<K, V>` (`Map` carries the key/value type names as strings).
pub enum BracketCategory {
  Other,
  Opt,
  Vec,
  Map((String, String)),
}
/// Parsed view of one struct/variant field plus its pb/node attributes.
pub struct ASTField<'a> {
  /// Named member for named fields, positional index for tuple fields.
  pub member: syn::Member,
  /// `#[pb(...)]` attributes on the field.
  pub pb_attrs: PBStructAttrs,
  /// `#[node(...)]` attributes on the field.
  pub node_attrs: NodeStructAttrs,
  pub ty: &'a syn::Type,
  pub original: &'a syn::Field,
  // If the field is Vec<String>, then the bracket_ty will be Vec
  pub bracket_ty: Option<syn::Ident>,
  // If the field is Vec<String>, then the bracket_inner_ty will be String
  pub bracket_inner_ty: Option<syn::Ident>,
  pub bracket_category: Option<BracketCategory>,
}
impl<'a> ASTField<'a> {
  /// Builds an `ASTField` from a `syn::Field`, classifying its container
  /// shape (Map / Vec / Option / other) via `parse_ty`.
  ///
  /// `index` is the field position; it becomes the member for tuple fields.
  /// Returns `Err` with a message when the field's type cannot be parsed.
  pub fn new(cx: &ASTResult, field: &'a syn::Field, index: usize) -> Result<Self, String> {
    let mut bracket_inner_ty = None;
    let mut bracket_ty = None;
    let mut bracket_category = Some(BracketCategory::Other);
    match parse_ty(cx, &field.ty) {
      Ok(Some(inner)) => {
        // Classify the outer container type.
        match inner.primitive_ty {
          PrimitiveTy::Map(map_info) => {
            bracket_category = Some(BracketCategory::Map((map_info.key.clone(), map_info.value)))
          },
          PrimitiveTy::Vec => {
            bracket_category = Some(BracketCategory::Vec);
          },
          PrimitiveTy::Opt => {
            bracket_category = Some(BracketCategory::Opt);
          },
          PrimitiveTy::Other => {
            bracket_category = Some(BracketCategory::Other);
          },
        }
        // Record the outer ident and, for generic containers, the inner ident.
        match *inner.bracket_ty_info {
          Some(bracketed_inner_ty) => {
            bracket_inner_ty = Some(bracketed_inner_ty.ident.clone());
            bracket_ty = Some(inner.ident.clone());
          },
          None => {
            bracket_ty = Some(inner.ident.clone());
          },
        }
      },
      Ok(None) => {
        let msg = format!("Fail to get the ty inner type: {:?}", field);
        return Err(msg);
      },
      Err(e) => {
        eprintln!("ASTField parser failed: {:?} with error: {}", field, e);
        return Err(e);
      },
    }
    Ok(ASTField {
      member: match &field.ident {
        Some(ident) => syn::Member::Named(ident.clone()),
        None => syn::Member::Unnamed(index.into()),
      },
      pb_attrs: PBStructAttrs::from_ast(cx, index, field),
      node_attrs: NodeStructAttrs::from_ast(cx, index, field),
      ty: &field.ty,
      original: field,
      bracket_ty,
      bracket_inner_ty,
      bracket_category,
    })
  }
  /// Name of the inner type when bracketed (e.g. `String` for `Vec<String>`),
  /// otherwise the outer type name.
  ///
  /// NOTE(review): unwraps `bracket_ty`; `new` always sets it, but a
  /// hand-constructed `ASTField` without it would panic here.
  pub fn ty_as_str(&self) -> String {
    match self.bracket_inner_ty {
      Some(ref ty) => ty.to_string(),
      None => self.bracket_ty.as_ref().unwrap().clone().to_string(),
    }
  }
  /// The field identifier for named fields; `None` for tuple fields.
  pub fn name(&self) -> Option<syn::Ident> {
    if let syn::Member::Named(ident) = &self.member {
      Some(ident.clone())
    } else {
      None
    }
  }
}
/// Shape of a struct or enum-variant payload.
#[derive(Copy, Clone)]
pub enum ASTStyle {
  /// Named fields.
  Struct,
  /// Many unnamed fields.
  Tuple,
  /// One unnamed field.
  NewType,
  /// No fields.
  Unit,
}
/// Classifies a field list and parses each recognizable field.
/// A single unnamed field is reported as `NewType`, multiple as `Tuple`.
pub fn struct_from_ast<'a>(
  cx: &ASTResult,
  fields: &'a syn::Fields,
) -> (ASTStyle, Vec<ASTField<'a>>) {
  match fields {
    syn::Fields::Named(fields) => (ASTStyle::Struct, fields_from_ast(cx, &fields.named)),
    // The guard arm must come before the general `Unnamed` arm.
    syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
      (ASTStyle::NewType, fields_from_ast(cx, &fields.unnamed))
    },
    syn::Fields::Unnamed(fields) => (ASTStyle::Tuple, fields_from_ast(cx, &fields.unnamed)),
    syn::Fields::Unit => (ASTStyle::Unit, Vec::new()),
  }
}
/// Builds an `ASTEnumVariant` for every variant of the enum, attaching the
/// event attributes gathered from both the variant and the enum itself.
pub fn enum_from_ast<'a>(
  cx: &ASTResult,
  ident: &syn::Ident,
  variants: &'a Punctuated<syn::Variant, Token![,]>,
  enum_attrs: &[syn::Attribute],
) -> Vec<ASTEnumVariant<'a>> {
  variants
    .iter()
    // Every variant yields exactly one entry, so `map` is the right adaptor
    // (the old `flat_map(.. Some(..))` never filtered anything out).
    .map(|variant| {
      let attrs = EventEnumAttrs::from_ast(cx, ident, variant, enum_attrs);
      let (style, fields) = struct_from_ast(cx, &variant.fields);
      ASTEnumVariant {
        ident: variant.ident.clone(),
        attrs,
        style,
        fields,
        original: variant,
      }
    })
    .collect()
}
/// Parses every field that carries a recognizable attribute; fields without
/// one, or whose parse fails, are silently skipped.
fn fields_from_ast<'a>(
  cx: &ASTResult,
  fields: &'a Punctuated<syn::Field, Token![,]>,
) -> Vec<ASTField<'a>> {
  fields
    .iter()
    .enumerate()
    .filter(|(_, field)| is_recognizable_field(field))
    .filter_map(|(index, field)| ASTField::new(cx, field, index).ok())
    .collect()
}
/// Reads the container-level `#[node_type = "..."]` attribute, if present.
/// Duplicates are reported as errors, but the last occurrence still wins.
fn get_node_type(
  ast_result: &ASTResult,
  struct_name: &Ident,
  attrs: &[syn::Attribute],
) -> Option<String> {
  let mut node_type = None;
  attrs
    .iter()
    .filter(|attr| attr.path.segments.iter().any(|s| s.ident == NODE_TYPE))
    .for_each(|attr| {
      if let Ok(NameValue(named_value)) = attr.parse_meta() {
        if node_type.is_some() {
          ast_result.error_spanned_by(struct_name, "Duplicate node type definition");
        }
        // Only string literals are accepted; other literal kinds are ignored.
        if let syn::Lit::Str(s) = named_value.lit {
          node_type = Some(s.value());
        }
      }
    });
  node_type
}

View File

@ -0,0 +1,44 @@
use quote::ToTokens;
use std::{cell::RefCell, fmt::Display, thread};
/// Accumulates errors reported while inspecting a derive input; consume it
/// with `check` before drop.
///
/// NOTE(review): the derived `Default` leaves `errors` as `None`, so a
/// `Default`-constructed value panics on first use — construct with
/// `ASTResult::new()` instead. Confirm whether `Default` should delegate to
/// `new`.
#[derive(Default)]
pub struct ASTResult {
  // `Some(errors)` until `check` takes them; `None` afterwards.
  errors: RefCell<Option<Vec<syn::Error>>>,
}
impl ASTResult {
  /// Creates an empty error sink ready to accumulate errors.
  pub fn new() -> Self {
    ASTResult {
      errors: RefCell::new(Some(Vec::new())),
    }
  }
  /// Records an error whose span points at `obj`.
  pub fn error_spanned_by<A: ToTokens, T: Display>(&self, obj: A, msg: T) {
    self
      .errors
      .borrow_mut()
      .as_mut()
      .unwrap()
      .push(syn::Error::new_spanned(obj.into_token_stream(), msg));
  }
  /// Records an already-built `syn::Error`.
  pub fn syn_error(&self, err: syn::Error) {
    self.errors.borrow_mut().as_mut().unwrap().push(err);
  }
  /// Consumes the sink; `Ok` when no errors were recorded.
  /// Must be called before drop, or `Drop` panics.
  pub fn check(self) -> Result<(), Vec<syn::Error>> {
    let errors = self.errors.borrow_mut().take().unwrap();
    match errors.len() {
      0 => Ok(()),
      _ => Err(errors),
    }
  }
}
impl Drop for ASTResult {
  /// Dropping without calling `check` is a programming error: panic loudly,
  /// unless the thread is already unwinding from another panic.
  fn drop(&mut self) {
    if !thread::panicking() && self.errors.borrow().is_some() {
      panic!("forgot to check for errors");
    }
  }
}

View File

@ -0,0 +1,150 @@
use crate::{get_event_meta_items, parse_lit_str, symbol::*, ASTResult};
use syn::{
self,
Meta::{NameValue, Path},
NestedMeta::{Lit, Meta},
};
/// Attribute values parsed from `#[event(...)]` on one enum variant.
#[derive(Debug, Clone)]
pub struct EventAttrs {
  // `#[event(input = "Type")]`: request payload path.
  input: Option<syn::Path>,
  // `#[event(output = "Type")]`: response payload path.
  output: Option<syn::Path>,
  // `#[event_err = "Type"]` on the enum: shared error type name.
  error_ty: Option<String>,
  // `#[event(ignore)]`: skip this variant during codegen.
  pub ignore: bool,
}
/// Event metadata for one enum variant, including its enum's name and the
/// variant's integer discriminant rendered as a string.
#[derive(Debug, Clone)]
pub struct EventEnumAttrs {
  pub enum_name: String,
  pub enum_item_name: String,
  // The variant discriminant as decimal digits; empty when none is declared.
  pub value: String,
  pub event_attrs: EventAttrs,
}
impl EventEnumAttrs {
  /// Gathers event metadata for one enum variant.
  ///
  /// NOTE(review): a discriminant that is a literal but not an integer
  /// literal hits `unimplemented!()` and aborts the macro expansion.
  pub fn from_ast(
    ast_result: &ASTResult,
    ident: &syn::Ident,
    variant: &syn::Variant,
    enum_attrs: &[syn::Attribute],
  ) -> Self {
    let enum_item_name = variant.ident.to_string();
    let enum_name = ident.to_string();
    let mut value = String::new();
    if variant.discriminant.is_some() {
      if let syn::Expr::Lit(ref expr_list) = variant.discriminant.as_ref().unwrap().1 {
        let lit_int = if let syn::Lit::Int(ref int_value) = expr_list.lit {
          int_value
        } else {
          unimplemented!()
        };
        value = lit_int.base10_digits().to_string();
      }
    }
    let event_attrs = get_event_attrs_from(ast_result, &variant.attrs, enum_attrs);
    EventEnumAttrs {
      enum_name,
      enum_item_name,
      value,
      event_attrs,
    }
  }
  /// The declared input payload path, if any.
  pub fn event_input(&self) -> Option<syn::Path> {
    self.event_attrs.input.clone()
  }
  /// The declared output payload path, if any.
  pub fn event_output(&self) -> Option<syn::Path> {
    self.event_attrs.output.clone()
  }
  /// The enum-level error type name.
  /// NOTE(review): panics when no `#[event_err = "..."]` was provided.
  pub fn event_error(&self) -> String {
    self.event_attrs.error_ty.as_ref().unwrap().clone()
  }
}
/// Combines enum-level `#[event_err = "..."]` with variant-level
/// `#[event(input/output/ignore)]` attributes into one `EventAttrs`.
fn get_event_attrs_from(
  ast_result: &ASTResult,
  variant_attrs: &[syn::Attribute],
  enum_attrs: &[syn::Attribute],
) -> EventAttrs {
  let mut event_attrs = EventAttrs {
    input: None,
    output: None,
    error_ty: None,
    ignore: false,
  };
  // The error type comes from the enum itself, not the variant.
  enum_attrs
    .iter()
    .filter(|attr| attr.path.segments.iter().any(|s| s.ident == EVENT_ERR))
    .for_each(|attr| {
      if let Ok(NameValue(named_value)) = attr.parse_meta() {
        if let syn::Lit::Str(s) = named_value.lit {
          event_attrs.error_ty = Some(s.value());
        } else {
          eprintln!("{} should not be empty", EVENT_ERR);
        }
      } else {
        eprintln!("❌ Can not find any {} on attr: {:#?}", EVENT_ERR, attr);
      }
    });
  // Closure mutating `event_attrs` for each nested meta item of `#[event(...)]`.
  // NOTE(review): the `.unwrap()`s below panic after reporting the parse error
  // instead of leaving the field unset.
  let mut extract_event_attr = |attr: &syn::Attribute, meta_item: &syn::NestedMeta| match &meta_item
  {
    Meta(NameValue(name_value)) => {
      if name_value.path == EVENT_INPUT {
        if let syn::Lit::Str(s) = &name_value.lit {
          let input_type = parse_lit_str(s)
            .map_err(|_| {
              ast_result.error_spanned_by(
                s,
                format!("failed to parse request deserializer {:?}", s.value()),
              )
            })
            .unwrap();
          event_attrs.input = Some(input_type);
        }
      }
      if name_value.path == EVENT_OUTPUT {
        if let syn::Lit::Str(s) = &name_value.lit {
          let output_type = parse_lit_str(s)
            .map_err(|_| {
              ast_result.error_spanned_by(
                s,
                format!("failed to parse response deserializer {:?}", s.value()),
              )
            })
            .unwrap();
          event_attrs.output = Some(output_type);
        }
      }
    },
    Meta(Path(word)) => {
      if word == EVENT_IGNORE && attr.path == EVENT {
        event_attrs.ignore = true;
      }
    },
    Lit(s) => ast_result.error_spanned_by(s, "unexpected attribute"),
    _ => ast_result.error_spanned_by(meta_item, "unexpected attribute"),
  };
  // Collect `(attribute, nested meta items)` pairs first, then apply them.
  let attr_meta_items_info = variant_attrs
    .iter()
    .flat_map(|attr| match get_event_meta_items(ast_result, attr) {
      Ok(items) => Some((attr, items)),
      Err(_) => None,
    })
    .collect::<Vec<(&syn::Attribute, Vec<syn::NestedMeta>)>>();
  for (attr, nested_metas) in attr_meta_items_info {
    nested_metas
      .iter()
      .for_each(|meta_item| extract_event_attr(attr, meta_item))
  }
  // eprintln!("😁{:#?}", event_attrs);
  event_attrs
}

View File

@ -0,0 +1,17 @@
// `#[macro_use]` brings syn's `Token![...]` macro into scope crate-wide.
#[macro_use]
extern crate syn;
// AST-inspection helpers shared by the derive macros of flowy-codegen.
mod ast;
mod ctxt;
mod pb_attrs;
mod event_attrs;
mod node_attrs;
pub mod symbol;
pub mod ty_ext;
pub use self::{symbol::*, ty_ext::*};
pub use ast::*;
pub use ctxt::ASTResult;
pub use event_attrs::*;
pub use pb_attrs::*;

View File

@ -0,0 +1,106 @@
use crate::{get_node_meta_items, parse_lit_into_expr_path, symbol::*, ASTAttr, ASTResult};
use quote::ToTokens;
use syn::{
self, LitStr,
Meta::NameValue,
NestedMeta::{Lit, Meta},
};
/// Field-level `#[node(...)]` attribute values.
pub struct NodeStructAttrs {
  /// `#[node(rename = "...")]`: serialized name override.
  pub rename: Option<LitStr>,
  /// Derived: true when `child_name` is set.
  pub has_child: bool,
  /// `#[node(child_name = "...")]`.
  pub child_name: Option<LitStr>,
  /// `#[node(child_index = N)]`.
  pub child_index: Option<syn::LitInt>,
  /// `#[node(get_value_with = "path")]`.
  pub get_node_value_with: Option<syn::ExprPath>,
  /// `#[node(set_value_with = "path")]`.
  pub set_node_value_with: Option<syn::ExprPath>,
  /// `#[node(with_children = "path")]`.
  pub with_children: Option<syn::ExprPath>,
}
impl NodeStructAttrs {
  /// Extract out the `#[node(...)]` attributes from a struct field.
  /// Unknown keys and stray literals are reported on `ast_result`;
  /// wrongly-typed literal values for known keys are silently ignored.
  pub fn from_ast(ast_result: &ASTResult, _index: usize, field: &syn::Field) -> Self {
    let mut rename = ASTAttr::none(ast_result, RENAME_NODE);
    let mut child_name = ASTAttr::none(ast_result, CHILD_NODE_NAME);
    let mut child_index = ASTAttr::none(ast_result, CHILD_NODE_INDEX);
    let mut get_node_value_with = ASTAttr::none(ast_result, GET_NODE_VALUE_WITH);
    let mut set_node_value_with = ASTAttr::none(ast_result, SET_NODE_VALUE_WITH);
    let mut with_children = ASTAttr::none(ast_result, WITH_CHILDREN);
    for meta_item in field
      .attrs
      .iter()
      .flat_map(|attr| get_node_meta_items(ast_result, attr))
      .flatten()
    {
      match &meta_item {
        // Parse '#[node(rename = x)]'
        Meta(NameValue(m)) if m.path == RENAME_NODE => {
          if let syn::Lit::Str(lit) = &m.lit {
            rename.set(&m.path, lit.clone());
          }
        },
        // Parse '#[node(child_name = x)]'
        Meta(NameValue(m)) if m.path == CHILD_NODE_NAME => {
          if let syn::Lit::Str(lit) = &m.lit {
            child_name.set(&m.path, lit.clone());
          }
        },
        // Parse '#[node(child_index = x)]'
        Meta(NameValue(m)) if m.path == CHILD_NODE_INDEX => {
          if let syn::Lit::Int(lit) = &m.lit {
            child_index.set(&m.path, lit.clone());
          }
        },
        // Parse `#[node(get_node_value_with = "...")]`
        Meta(NameValue(m)) if m.path == GET_NODE_VALUE_WITH => {
          if let Ok(path) = parse_lit_into_expr_path(ast_result, GET_NODE_VALUE_WITH, &m.lit) {
            get_node_value_with.set(&m.path, path);
          }
        },
        // Parse `#[node(set_node_value_with= "...")]`
        Meta(NameValue(m)) if m.path == SET_NODE_VALUE_WITH => {
          if let Ok(path) = parse_lit_into_expr_path(ast_result, SET_NODE_VALUE_WITH, &m.lit) {
            set_node_value_with.set(&m.path, path);
          }
        },
        // Parse `#[node(with_children= "...")]`
        Meta(NameValue(m)) if m.path == WITH_CHILDREN => {
          if let Ok(path) = parse_lit_into_expr_path(ast_result, WITH_CHILDREN, &m.lit) {
            with_children.set(&m.path, path);
          }
        },
        Meta(meta_item) => {
          let path = meta_item
            .path()
            .into_token_stream()
            .to_string()
            .replace(' ', "");
          ast_result.error_spanned_by(
            meta_item.path(),
            format!("unknown node field attribute `{}`", path),
          );
        },
        Lit(lit) => {
          ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
        },
      }
    }
    // `child_name` is consumed first so `has_child` can be derived from it.
    let child_name = child_name.get();
    NodeStructAttrs {
      rename: rename.get(),
      child_index: child_index.get(),
      has_child: child_name.is_some(),
      child_name,
      get_node_value_with: get_node_value_with.get(),
      set_node_value_with: set_node_value_with.get(),
      with_children: with_children.get(),
    }
  }
}

View File

@ -0,0 +1,489 @@
#![allow(clippy::all)]
use crate::{symbol::*, ASTResult};
use proc_macro2::{Group, Span, TokenStream, TokenTree};
use quote::ToTokens;
use syn::{
self,
parse::{self, Parse},
Meta::{List, NameValue, Path},
NestedMeta::{Lit, Meta},
};
/// Container-level `#[pb(...)]` attribute values: which protobuf struct or
/// enum type the item maps to.
#[allow(dead_code)]
pub struct PBAttrsContainer {
  // The item's own name; currently unused (hence `allow(dead_code)`).
  name: String,
  pb_struct_type: Option<syn::Type>,
  pb_enum_type: Option<syn::Type>,
}
impl PBAttrsContainer {
  /// Extract out the `#[pb(...)]` attributes from an item.
  ///
  /// When no explicit `#[pb(struct/enum = "...")]` is given, a protobuf type
  /// named after the item itself is assumed.
  pub fn from_ast(ast_result: &ASTResult, item: &syn::DeriveInput) -> Self {
    let mut pb_struct_type = ASTAttr::none(ast_result, PB_STRUCT);
    let mut pb_enum_type = ASTAttr::none(ast_result, PB_ENUM);
    for meta_item in item
      .attrs
      .iter()
      .flat_map(|attr| get_pb_meta_items(ast_result, attr))
      .flatten()
    {
      match &meta_item {
        // Parse `#[pb(struct = "Type")]`
        Meta(NameValue(m)) if m.path == PB_STRUCT => {
          if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_STRUCT, &m.lit) {
            pb_struct_type.set_opt(&m.path, Some(into_ty));
          }
        },
        // Parse `#[pb(enum = "Type")]`
        Meta(NameValue(m)) if m.path == PB_ENUM => {
          if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_ENUM, &m.lit) {
            pb_enum_type.set_opt(&m.path, Some(into_ty));
          }
        },
        Meta(meta_item) => {
          let path = meta_item
            .path()
            .into_token_stream()
            .to_string()
            .replace(' ', "");
          ast_result.error_spanned_by(
            meta_item.path(),
            format!("unknown container attribute `{}`", path),
          );
        },
        Lit(lit) => {
          ast_result.error_spanned_by(lit, "unexpected literal in container attribute");
        },
      }
    }
    // Fall back to the item's own name (`&ast_result` was a needless double
    // reference; the reference is now passed directly).
    match &item.data {
      syn::Data::Struct(_) => {
        pb_struct_type.set_if_none(default_pb_type(ast_result, &item.ident));
      },
      syn::Data::Enum(_) => {
        pb_enum_type.set_if_none(default_pb_type(ast_result, &item.ident));
      },
      _ => {},
    }
    PBAttrsContainer {
      name: item.ident.to_string(),
      pb_struct_type: pb_struct_type.get(),
      pb_enum_type: pb_enum_type.get(),
    }
  }
  /// The protobuf struct type to map to, if any.
  pub fn pb_struct_type(&self) -> Option<&syn::Type> {
    self.pb_struct_type.as_ref()
  }
  /// The protobuf enum type to map to, if any.
  pub fn pb_enum_type(&self) -> Option<&syn::Type> {
    self.pb_enum_type.as_ref()
  }
}
/// A write-once attribute slot: remembers the value, the tokens it came from,
/// and reports a duplicate error on a second `set`.
pub struct ASTAttr<'c, T> {
  ast_result: &'c ASTResult,
  // The attribute key, used in the duplicate-error message.
  name: Symbol,
  // Tokens of the syntax node the value was read from.
  tokens: TokenStream,
  value: Option<T>,
}
impl<'c, T> ASTAttr<'c, T> {
  /// Creates an empty slot for the attribute `name`.
  pub(crate) fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
    ASTAttr {
      ast_result,
      name,
      tokens: TokenStream::new(),
      value: None,
    }
  }
  /// Stores `value`; records a duplicate-attribute error if already set.
  pub(crate) fn set<A: ToTokens>(&mut self, obj: A, value: T) {
    let tokens = obj.into_token_stream();
    if self.value.is_some() {
      self
        .ast_result
        .error_spanned_by(tokens, format!("duplicate attribute `{}`", self.name));
    } else {
      self.tokens = tokens;
      self.value = Some(value);
    }
  }
  /// Like `set`, but a `None` value is a no-op.
  fn set_opt<A: ToTokens>(&mut self, obj: A, value: Option<T>) {
    if let Some(value) = value {
      self.set(obj, value);
    }
  }
  /// Sets a fallback value without duplicate checking.
  pub(crate) fn set_if_none(&mut self, value: T) {
    if self.value.is_none() {
      self.value = Some(value);
    }
  }
  /// Consumes the slot, yielding the stored value if any.
  pub(crate) fn get(self) -> Option<T> {
    self.value
  }
  /// Consumes the slot, yielding value plus originating tokens.
  /// Rewritten from a manual `match` to the equivalent `Option::map`
  /// (destructured first because edition-2018 closures capture all of `self`).
  #[allow(dead_code)]
  fn get_with_tokens(self) -> Option<(TokenStream, T)> {
    let ASTAttr { tokens, value, .. } = self;
    value.map(|value| (tokens, value))
  }
}
/// Field-level `#[pb(...)]` attribute values.
pub struct PBStructAttrs {
  #[allow(dead_code)]
  name: String,
  /// `#[pb(index = N)]`: protobuf field number.
  pb_index: Option<syn::LitInt>,
  /// `#[pb(one_of)]`: field participates in a protobuf oneof.
  pb_one_of: bool,
  skip_pb_serializing: bool,
  skip_pb_deserializing: bool,
  serialize_pb_with: Option<syn::ExprPath>,
  deserialize_pb_with: Option<syn::ExprPath>,
}
pub fn is_recognizable_field(field: &syn::Field) -> bool {
field
.attrs
.iter()
.any(|attr| is_recognizable_attribute(attr))
}
impl PBStructAttrs {
  /// Extract out the `#[pb(...)]` attributes from a struct field.
  /// Unknown keys and stray literals are reported on `ast_result`.
  pub fn from_ast(ast_result: &ASTResult, index: usize, field: &syn::Field) -> Self {
    let mut pb_index = ASTAttr::none(ast_result, PB_INDEX);
    let mut pb_one_of = BoolAttr::none(ast_result, PB_ONE_OF);
    let mut serialize_pb_with = ASTAttr::none(ast_result, SERIALIZE_PB_WITH);
    let mut skip_pb_serializing = BoolAttr::none(ast_result, SKIP_PB_SERIALIZING);
    let mut deserialize_pb_with = ASTAttr::none(ast_result, DESERIALIZE_PB_WITH);
    let mut skip_pb_deserializing = BoolAttr::none(ast_result, SKIP_PB_DESERIALIZING);
    // Unnamed (tuple) fields fall back to their position as the name.
    let ident = match &field.ident {
      Some(ident) => ident.to_string(),
      None => index.to_string(),
    };
    for meta_item in field
      .attrs
      .iter()
      .flat_map(|attr| get_pb_meta_items(ast_result, attr))
      .flatten()
    {
      match &meta_item {
        // Parse `#[pb(skip)]` — disables both directions at once.
        Meta(Path(word)) if word == SKIP => {
          skip_pb_serializing.set_true(word);
          skip_pb_deserializing.set_true(word);
        },
        // Parse '#[pb(index = x)]'
        Meta(NameValue(m)) if m.path == PB_INDEX => {
          if let syn::Lit::Int(lit) = &m.lit {
            pb_index.set(&m.path, lit.clone());
          }
        },
        // Parse `#[pb(one_of)]`
        Meta(Path(path)) if path == PB_ONE_OF => {
          pb_one_of.set_true(path);
        },
        // Parse `#[pb(serialize_pb_with = "...")]`
        Meta(NameValue(m)) if m.path == SERIALIZE_PB_WITH => {
          if let Ok(path) = parse_lit_into_expr_path(ast_result, SERIALIZE_PB_WITH, &m.lit) {
            serialize_pb_with.set(&m.path, path);
          }
        },
        // Parse `#[pb(deserialize_pb_with = "...")]`
        Meta(NameValue(m)) if m.path == DESERIALIZE_PB_WITH => {
          if let Ok(path) = parse_lit_into_expr_path(ast_result, DESERIALIZE_PB_WITH, &m.lit) {
            deserialize_pb_with.set(&m.path, path);
          }
        },
        Meta(meta_item) => {
          let path = meta_item
            .path()
            .into_token_stream()
            .to_string()
            .replace(' ', "");
          ast_result.error_spanned_by(
            meta_item.path(),
            format!("unknown pb field attribute `{}`", path),
          );
        },
        Lit(lit) => {
          ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
        },
      }
    }
    PBStructAttrs {
      name: ident,
      pb_index: pb_index.get(),
      pb_one_of: pb_one_of.get(),
      skip_pb_serializing: skip_pb_serializing.get(),
      skip_pb_deserializing: skip_pb_deserializing.get(),
      serialize_pb_with: serialize_pb_with.get(),
      deserialize_pb_with: deserialize_pb_with.get(),
    }
  }
  /// The protobuf field number as decimal digits, if declared.
  #[allow(dead_code)]
  pub fn pb_index(&self) -> Option<String> {
    self
      .pb_index
      .as_ref()
      .map(|lit| lit.base10_digits().to_string())
  }
  /// True when the field was marked `#[pb(one_of)]`.
  pub fn is_one_of(&self) -> bool {
    self.pb_one_of
  }
  /// Custom serializer path, if declared.
  pub fn serialize_pb_with(&self) -> Option<&syn::ExprPath> {
    self.serialize_pb_with.as_ref()
  }
  /// Custom deserializer path, if declared.
  pub fn deserialize_pb_with(&self) -> Option<&syn::ExprPath> {
    self.deserialize_pb_with.as_ref()
  }
  pub fn skip_pb_serializing(&self) -> bool {
    self.skip_pb_serializing
  }
  pub fn skip_pb_deserializing(&self) -> bool {
    self.skip_pb_deserializing
  }
}
/// How a field obtains its default value.
/// NOTE(review): shadows `std::default::Default` in this module and appears
/// unused here — confirm whether downstream crates rely on it.
pub enum Default {
  /// Field must always be specified because it does not have a default.
  None,
  /// The default is given by `std::default::Default::default()`.
  Default,
  /// The default is given by this function.
  Path(syn::ExprPath),
}
/// True when the attribute path is one of the markers this crate consumes.
pub fn is_recognizable_attribute(attr: &syn::Attribute) -> bool {
  [PB_ATTRS, EVENT, NODE_ATTRS, NODES_ATTRS]
    .iter()
    .any(|marker| attr.path == *marker)
}
/// Returns the nested meta items of a `#[pb(...)]` attribute.
/// Non-`pb` attributes yield an empty list; malformed ones record an error
/// and return `Err(())`.
pub fn get_pb_meta_items(
  cx: &ASTResult,
  attr: &syn::Attribute,
) -> Result<Vec<syn::NestedMeta>, ()> {
  // Only handle the attribute that we have defined
  if attr.path != PB_ATTRS {
    return Ok(vec![]);
  }
  // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
  match attr.parse_meta() {
    Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
    Ok(other) => {
      cx.error_spanned_by(other, "expected #[pb(...)]");
      Err(())
    },
    Err(err) => {
      cx.error_spanned_by(attr, "attribute must be str, e.g. #[pb(xx = \"xxx\")]");
      cx.syn_error(err);
      Err(())
    },
  }
}
/// Returns the nested meta items of a `#[node(...)]` / `#[nodes(...)]`
/// attribute. Unlike `get_pb_meta_items`, a non-list form is tolerated and
/// yields an empty list rather than an error.
pub fn get_node_meta_items(
  cx: &ASTResult,
  attr: &syn::Attribute,
) -> Result<Vec<syn::NestedMeta>, ()> {
  // Only handle the attribute that we have defined
  if attr.path != NODE_ATTRS && attr.path != NODES_ATTRS {
    return Ok(vec![]);
  }
  // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
  match attr.parse_meta() {
    Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
    Ok(_) => Ok(vec![]),
    Err(err) => {
      cx.error_spanned_by(attr, "attribute must be str, e.g. #[node(xx = \"xxx\")]");
      cx.syn_error(err);
      Err(())
    },
  }
}
/// Returns the nested meta items of an `#[event(...)]` attribute.
/// Non-`event` attributes yield an empty list; malformed ones record an
/// error and return `Err(())`.
pub fn get_event_meta_items(
  cx: &ASTResult,
  attr: &syn::Attribute,
) -> Result<Vec<syn::NestedMeta>, ()> {
  // Only handle the attribute that we have defined
  if attr.path != EVENT {
    return Ok(vec![]);
  }
  // http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
  match attr.parse_meta() {
    Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
    Ok(other) => {
      cx.error_spanned_by(other, "expected #[event(...)]");
      Err(())
    },
    Err(err) => {
      cx.error_spanned_by(attr, "attribute must be str, e.g. #[event(xx = \"xxx\")]");
      cx.syn_error(err);
      Err(())
    },
  }
}
/// Parses a string literal attribute value (e.g. `with = "mod::func"`) into
/// an expression path, recording an error on failure.
pub fn parse_lit_into_expr_path(
  ast_result: &ASTResult,
  attr_name: Symbol,
  lit: &syn::Lit,
) -> Result<syn::ExprPath, ()> {
  let string = get_lit_str(ast_result, attr_name, lit)?;
  parse_lit_str(string).map_err(|_| {
    ast_result.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value()))
  })
}
/// Narrows a literal to a string literal, recording an error for any other
/// literal kind.
fn get_lit_str<'a>(
  ast_result: &ASTResult,
  attr_name: Symbol,
  lit: &'a syn::Lit,
) -> Result<&'a syn::LitStr, ()> {
  if let syn::Lit::Str(lit) = lit {
    Ok(lit)
  } else {
    ast_result.error_spanned_by(
      lit,
      format!(
        "expected pb {} attribute to be a string: `{} = \"...\"`",
        attr_name, attr_name
      ),
    );
    Err(())
  }
}
/// Parses a string literal attribute value (e.g. `struct = "SomeType"`)
/// into a `syn::Type`, recording an error on failure.
fn parse_lit_into_ty(
  ast_result: &ASTResult,
  attr_name: Symbol,
  lit: &syn::Lit,
) -> Result<syn::Type, ()> {
  let string = get_lit_str(ast_result, attr_name, lit)?;
  parse_lit_str(string).map_err(|_| {
    ast_result.error_spanned_by(
      lit,
      format!("failed to parse type: {} = {:?}", attr_name, string.value()),
    )
  })
}
/// Parses the contents of a string literal as any `syn`-parsable item,
/// re-spanning the result onto the literal so errors point at the attribute.
pub fn parse_lit_str<T>(s: &syn::LitStr) -> parse::Result<T>
where
  T: Parse,
{
  let tokens = spanned_tokens(s)?;
  syn::parse2(tokens)
}
/// Tokenizes the literal's string content and stamps every token with the
/// literal's span.
fn spanned_tokens(s: &syn::LitStr) -> parse::Result<TokenStream> {
  let stream = syn::parse_str(&s.value())?;
  Ok(respan_token_stream(stream, s.span()))
}
/// Applies `span` to every token in the stream (recursing into groups).
fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream {
  stream
    .into_iter()
    .map(|token| respan_token_tree(token, span))
    .collect()
}
/// Applies `span` to one token; a `Group` is rebuilt so its inner tokens are
/// re-spanned as well.
fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
  if let TokenTree::Group(g) = &mut token {
    *g = Group::new(g.delimiter(), respan_token_stream(g.stream(), span));
  }
  token.set_span(span);
  token
}
/// Builds the fallback protobuf type: a type with the same name as the item.
/// On failure the error is recorded and expansion aborts via `panic!`
/// (acceptable here because this only runs inside a proc macro).
fn default_pb_type(ast_result: &ASTResult, ident: &syn::Ident) -> syn::Type {
  let take_ident = ident.to_string();
  let lit_str = syn::LitStr::new(&take_ident, ident.span());
  if let Ok(tokens) = spanned_tokens(&lit_str) {
    if let Ok(pb_struct_ty) = syn::parse2(tokens) {
      return pb_struct_ty;
    }
  }
  ast_result.error_spanned_by(
    ident,
    format!("❌ Can't find {} protobuf struct", take_ident),
  );
  panic!()
}
/// True when `ty` (after unwrapping any `Type::Group`) is `Option<T>` with
/// exactly one generic argument.
#[allow(dead_code)]
pub fn is_option(ty: &syn::Type) -> bool {
  let path = match ungroup(ty) {
    syn::Type::Path(type_path) => &type_path.path,
    _ => return false,
  };
  path.segments.last().map_or(false, |seg| {
    matches!(
      &seg.arguments,
      syn::PathArguments::AngleBracketed(bracketed)
        if seg.ident == "Option" && bracketed.args.len() == 1
    )
  })
}
/// Recursively peels `Type::Group` wrappers (inserted by macro expansion)
/// until a concrete type is reached.
#[allow(dead_code)]
pub fn ungroup(ty: &syn::Type) -> &syn::Type {
  match ty {
    syn::Type::Group(group) => ungroup(&group.elem),
    _ => ty,
  }
}
/// A flag attribute: presence-only wrapper over `ASTAttr<()>`, so a repeated
/// flag still reports a duplicate error.
struct BoolAttr<'c>(ASTAttr<'c, ()>);
impl<'c> BoolAttr<'c> {
  /// Creates an unset flag named `name`.
  fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
    BoolAttr(ASTAttr::none(ast_result, name))
  }
  /// Marks the flag as present, pointing errors at `obj`.
  fn set_true<A: ToTokens>(&mut self, obj: A) {
    self.0.set(obj, ());
  }
  /// True when the flag was seen.
  fn get(&self) -> bool {
    self.0.value.is_some()
  }
}

View File

@ -0,0 +1,78 @@
use std::fmt::{self, Display};
use syn::{Ident, Path};
/// Interned attribute/key name, comparable against `syn` idents and paths
/// via the `PartialEq` impls below.
#[derive(Copy, Clone)]
pub struct Symbol(&'static str);
// Protobuf
pub const PB_ATTRS: Symbol = Symbol("pb");
//#[pb(skip)]
pub const SKIP: Symbol = Symbol("skip");
//#[pb(index = "1")]
pub const PB_INDEX: Symbol = Symbol("index");
//#[pb(one_of)]
pub const PB_ONE_OF: Symbol = Symbol("one_of");
//#[pb(skip_pb_deserializing = "...")]
pub const SKIP_PB_DESERIALIZING: Symbol = Symbol("skip_pb_deserializing");
//#[pb(skip_pb_serializing)]
pub const SKIP_PB_SERIALIZING: Symbol = Symbol("skip_pb_serializing");
//#[pb(serialize_pb_with = "...")]
pub const SERIALIZE_PB_WITH: Symbol = Symbol("serialize_pb_with");
//#[pb(deserialize_pb_with = "...")]
pub const DESERIALIZE_PB_WITH: Symbol = Symbol("deserialize_pb_with");
//#[pb(struct="some struct")]
pub const PB_STRUCT: Symbol = Symbol("struct");
//#[pb(enum="some enum")]
pub const PB_ENUM: Symbol = Symbol("enum");
// Event: keys inside `#[event(...)]` plus the enum-level `#[event_err]`.
pub const EVENT_INPUT: Symbol = Symbol("input");
pub const EVENT_OUTPUT: Symbol = Symbol("output");
pub const EVENT_IGNORE: Symbol = Symbol("ignore");
pub const EVENT: Symbol = Symbol("event");
pub const EVENT_ERR: Symbol = Symbol("event_err");
// Node: keys inside `#[node(...)]` / `#[nodes(...)]`.
pub const NODE_ATTRS: Symbol = Symbol("node");
pub const NODES_ATTRS: Symbol = Symbol("nodes");
pub const NODE_TYPE: Symbol = Symbol("node_type");
pub const NODE_INDEX: Symbol = Symbol("index");
pub const RENAME_NODE: Symbol = Symbol("rename");
pub const CHILD_NODE_NAME: Symbol = Symbol("child_name");
pub const CHILD_NODE_INDEX: Symbol = Symbol("child_index");
pub const SKIP_NODE_ATTRS: Symbol = Symbol("skip_node_attribute");
pub const GET_NODE_VALUE_WITH: Symbol = Symbol("get_value_with");
pub const SET_NODE_VALUE_WITH: Symbol = Symbol("set_value_with");
pub const GET_VEC_ELEMENT_WITH: Symbol = Symbol("get_element_with");
pub const GET_MUT_VEC_ELEMENT_WITH: Symbol = Symbol("get_mut_element_with");
pub const WITH_CHILDREN: Symbol = Symbol("with_children");
// Allow `ident == SYMBOL` and `path == SYMBOL` comparisons throughout the
// attribute-parsing code; a path matches only when it is that single ident.
impl PartialEq<Symbol> for Ident {
  fn eq(&self, word: &Symbol) -> bool {
    self == word.0
  }
}
impl<'a> PartialEq<Symbol> for &'a Ident {
  fn eq(&self, word: &Symbol) -> bool {
    *self == word.0
  }
}
impl PartialEq<Symbol> for Path {
  fn eq(&self, word: &Symbol) -> bool {
    self.is_ident(word.0)
  }
}
impl<'a> PartialEq<Symbol> for &'a Path {
  fn eq(&self, word: &Symbol) -> bool {
    self.is_ident(word.0)
  }
}
impl Display for Symbol {
  fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    formatter.write_str(self.0)
  }
}

View File

@ -0,0 +1,156 @@
use crate::ASTResult;
use syn::{self, AngleBracketedGenericArguments, PathSegment};
/// Container classification of a field type; `Map` carries the key/value
/// type names.
#[derive(Eq, PartialEq, Debug)]
pub enum PrimitiveTy {
  Map(MapInfo),
  Vec,
  Opt,
  Other,
}
/// Result of `parse_ty`: the outer type's ident, its classification, and —
/// for generic containers — the recursively parsed inner type.
#[derive(Debug)]
pub struct TyInfo<'a> {
  pub ident: &'a syn::Ident,
  pub ty: &'a syn::Type,
  pub primitive_ty: PrimitiveTy,
  /// `Some` for `Vec<T>` / `Option<T>` / `HashMap<K, V>` (the value type).
  pub bracket_ty_info: Box<Option<TyInfo<'a>>>,
}
/// Key and value type names of a `HashMap<K, V>` field, as strings.
#[derive(Debug, Eq, PartialEq)]
pub struct MapInfo {
  pub key: String,
  pub value: String,
}
impl MapInfo {
  fn new(key: String, value: String) -> Self {
    MapInfo { key, value }
  }
}
impl<'a> TyInfo<'a> {
  /// The inner (bracketed) type's ident.
  /// Panics when the type has no bracketed inner type.
  #[allow(dead_code)]
  pub fn bracketed_ident(&'a self) -> &'a syn::Ident {
    match self.bracket_ty_info.as_ref() {
      Some(b_ty) => b_ty.ident,
      None => {
        panic!()
      },
    }
  }
}
/// Inspects a type and classifies it as Map / Vec / Option / other.
///
/// Only single-segment paths are analyzed; multi-segment paths (e.g.
/// `std::collections::HashMap`) yield `Ok(None)`. Generic containers other
/// than `HashMap`/`Vec`/`Option` record an error and return `Err`.
pub fn parse_ty<'a>(
  ast_result: &ASTResult,
  ty: &'a syn::Type,
) -> Result<Option<TyInfo<'a>>, String> {
  // Type -> TypePath -> Path -> PathSegment -> PathArguments ->
  // AngleBracketedGenericArguments -> GenericArgument -> Type.
  if let syn::Type::Path(ref p) = ty {
    if p.path.segments.len() != 1 {
      return Ok(None);
    }
    let seg = match p.path.segments.last() {
      Some(seg) => seg,
      None => return Ok(None),
    };
    // (An unused `_is_option` local was removed here; it was dead code.)
    return if let syn::PathArguments::AngleBracketed(ref bracketed) = seg.arguments {
      match seg.ident.to_string().as_ref() {
        "HashMap" => generate_hashmap_ty_info(ast_result, ty, seg, bracketed),
        "Vec" => generate_vec_ty_info(ast_result, seg, bracketed),
        "Option" => generate_option_ty_info(ast_result, ty, seg, bracketed),
        _ => {
          let msg = format!("Unsupported type: {}", seg.ident);
          ast_result.error_spanned_by(&seg.ident, &msg);
          return Err(msg);
        },
      }
    } else {
      // Non-generic path: treat as a plain (non-container) type.
      return Ok(Some(TyInfo {
        ident: &seg.ident,
        ty,
        primitive_ty: PrimitiveTy::Other,
        bracket_ty_info: Box::new(None),
      }));
    };
  }
  Err("Unsupported inner type, get inner type fail".to_string())
}
/// Collects the type arguments inside angle brackets, skipping lifetimes,
/// const generics and other non-type arguments.
fn parse_bracketed(bracketed: &AngleBracketedGenericArguments) -> Vec<&syn::Type> {
  bracketed
    .args
    .iter()
    .filter_map(|arg| match arg {
      syn::GenericArgument::Type(ty_in_bracket) => Some(ty_in_bracket),
      _ => None,
    })
    .collect()
}
/// Builds the `TyInfo` for a `HashMap<K, V>` segment: the key/value type
/// names plus the recursively parsed value type.
///
/// NOTE(review): the `unwrap`s panic when a key/value type itself parses to
/// `Ok(None)` (e.g. a multi-segment path).
pub fn generate_hashmap_ty_info<'a>(
  ast_result: &ASTResult,
  ty: &'a syn::Type,
  path_segment: &'a PathSegment,
  bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
  // A map must carry exactly two generic arguments: the key and the value.
  // (The old comment claiming "greater than 2" was wrong.)
  if bracketed.args.len() != 2 {
    return Ok(None);
  }
  let types = parse_bracketed(bracketed);
  let key = parse_ty(ast_result, types[0])?.unwrap().ident.to_string();
  // Parse the value type once and reuse it for both its name and the nested
  // info (previously it was parsed twice).
  let value_info = parse_ty(ast_result, types[1])?;
  let value = value_info.as_ref().unwrap().ident.to_string();
  Ok(Some(TyInfo {
    ident: &path_segment.ident,
    ty,
    primitive_ty: PrimitiveTy::Map(MapInfo::new(key, value)),
    bracket_ty_info: Box::new(value_info),
  }))
}
/// Builds the `TyInfo` for an `Option<T>` segment, recursively parsing `T`.
///
/// The caller must only pass the `Option` segment here; anything else is a
/// bug, hence the assert. NOTE(review): `types[0]` panics if the brackets
/// contain no type argument (e.g. a lone lifetime) — confirm callers filter
/// that out.
fn generate_option_ty_info<'a>(
  ast_result: &ASTResult,
  ty: &'a syn::Type,
  path_segment: &'a PathSegment,
  bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
  // Compare the ident directly instead of allocating two owned Strings
  // (clippy::cmp_owned).
  assert_eq!(path_segment.ident, "Option");
  let types = parse_bracketed(bracketed);
  let bracket_ty_info = Box::new(parse_ty(ast_result, types[0])?);
  Ok(Some(TyInfo {
    ident: &path_segment.ident,
    ty,
    primitive_ty: PrimitiveTy::Opt,
    bracket_ty_info,
  }))
}
/// Builds the [`TyInfo`] for a `Vec<T>` path segment.
/// Returns `Ok(None)` unless there is exactly one type argument.
fn generate_vec_ty_info<'a>(
  ast_result: &ASTResult,
  path_segment: &'a PathSegment,
  bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
  if bracketed.args.len() != 1 {
    return Ok(None);
  }
  match bracketed.args.first() {
    Some(syn::GenericArgument::Type(inner_ty)) => {
      let inner_info = Box::new(parse_ty(ast_result, inner_ty)?);
      Ok(Some(TyInfo {
        ident: &path_segment.ident,
        // `ty` carries the element type, not the `Vec` itself.
        ty: inner_ty,
        primitive_ty: PrimitiveTy::Vec,
        bracket_ty_info: inner_info,
      }))
    },
    _ => Ok(None),
  }
}

View File

@ -0,0 +1,49 @@
[package]
name = "flowy-codegen"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
log = "0.4.17"
serde = { version = "1.0", features = ["derive"]}
serde_json.workspace = true
flowy-ast.workspace = true
quote = "1.0"
cmd_lib = { version = "1.3.0", optional = true }
protoc-rust = { version = "2", optional = true }
walkdir = { version = "2", optional = true }
similar = { version = "1.3.0", optional = true }
syn = { version = "1.0.109", features = ["extra-traits", "parsing", "derive", "full"] }
fancy-regex = { version = "0.10.0", optional = true }
lazy_static = { version = "1.4.0", optional = true }
tera = { version = "1.17.1", optional = true}
itertools = { version = "0.10", optional = true }
phf = { version = "0.8.0", features = ["macros"], optional = true }
console = {version = "0.14.1", optional = true}
protoc-bin-vendored = { version = "3.0", optional = true }
toml = {version = "0.5.11", optional = true}
[features]
proto_gen = [
  "similar",
  "fancy-regex",
  "lazy_static",
  "tera",
  "itertools",
  "phf",
  # walkdir listed once (it appeared twice before)
  "walkdir",
  "console",
  "toml",
  "cmd_lib",
  "protoc-rust",
  "protoc-bin-vendored",
]
dart_event = ["walkdir", "tera"]
dart = ["proto_gen", "dart_event"]
ts_event = ["walkdir", "tera"]
ts = ["proto_gen", "ts_event"]

View File

@ -0,0 +1,41 @@
use flowy_ast::EventEnumAttrs;
use quote::format_ident;
/// Intermediate representation of one event enum variant, consumed by the
/// code-generation templates.
#[allow(dead_code)]
pub struct EventASTContext {
  /// Variant identifier of the event (the enum item name).
  pub event: syn::Ident,
  /// Name of the enum the event belongs to.
  pub event_ty: syn::Ident,
  /// Name of the generated request wrapper: `<Name>Event`.
  pub event_request_struct: syn::Ident,
  /// Input payload type reported by `EventEnumAttrs::event_input`, if any.
  pub event_input: Option<syn::Path>,
  /// Output payload type reported by `EventEnumAttrs::event_output`, if any.
  pub event_output: Option<syn::Path>,
  /// Error type name reported by `EventEnumAttrs::event_error`.
  pub event_error: String,
}
impl EventASTContext {
  /// Builds the template context from the parsed enum-variant attributes.
  ///
  /// # Panics
  /// Panics when the variant name is empty.
  #[allow(dead_code)]
  pub fn from(enum_attrs: &EventEnumAttrs) -> EventASTContext {
    let command_name = enum_attrs.enum_item_name.clone();
    assert!(
      !command_name.is_empty(),
      "Invalid command name: {}",
      enum_attrs.enum_item_name
    );

    // `Foo_Bar` becomes `FooBarEvent`: drop underscores, append "Event".
    let request_struct_name = command_name.split('_').collect::<Vec<&str>>().join("");

    EventASTContext {
      event: format_ident!("{}", &command_name),
      event_ty: format_ident!("{}", enum_attrs.enum_name),
      event_request_struct: format_ident!("{}Event", request_struct_name),
      event_input: enum_attrs.event_input(),
      event_output: enum_attrs.event_output(),
      event_error: enum_attrs.event_error(),
    }
  }
}

View File

@ -0,0 +1,180 @@
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
use syn::Item;
use walkdir::WalkDir;
use flowy_ast::ASTResult;
use crate::ast::EventASTContext;
use crate::flowy_toml::{parse_crate_config_from, CrateConfig};
use crate::util::{is_crate_dir, is_hidden, path_string_with_component, read_file};
use super::event_template::*;
/// Generates the Dart event dispatch file (`dart_event.dart`) for one crate.
///
/// Walks the current crate for event definitions, renders each event through
/// the Tera template, and writes the aggregated output under
/// `$FLUTTER_FLOWY_SDK_PATH/lib/dispatch/dart_event/<crate_name>/`.
/// Returns early (with a message) when the required env vars are unset.
pub fn gen(crate_name: &str) {
  // Both env vars are required to locate the Flutter SDK output directory.
  if std::env::var("CARGO_MAKE_WORKING_DIRECTORY").is_err() {
    println!("CARGO_MAKE_WORKING_DIRECTORY was not set, skip generate dart pb");
    return;
  }
  if std::env::var("FLUTTER_FLOWY_SDK_PATH").is_err() {
    println!("FLUTTER_FLOWY_SDK_PATH was not set, skip generate dart pb");
    return;
  }

  let crate_path = std::fs::canonicalize(".")
    .unwrap()
    .as_path()
    .display()
    .to_string();
  // Collect every event enum declared in the crate's configured event files.
  let event_crates = parse_dart_event_files(vec![crate_path]);
  let event_ast = event_crates
    .iter()
    .flat_map(parse_event_crate)
    .collect::<Vec<_>>();

  // Render each event; `index` lets the template distinguish entries.
  let event_render_ctx = ast_to_event_render_ctx(event_ast.as_ref());
  let mut render_result = DART_IMPORTED.to_owned();
  for (index, render_ctx) in event_render_ctx.into_iter().enumerate() {
    let mut event_template = EventTemplate::new();
    if let Some(content) = event_template.render(render_ctx, index) {
      render_result.push_str(content.as_ref())
    }
  }

  let dart_event_folder: PathBuf = [
    &std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap(),
    &std::env::var("FLUTTER_FLOWY_SDK_PATH").unwrap(),
    "lib",
    "dispatch",
    "dart_event",
    crate_name,
  ]
  .iter()
  .collect();

  if !dart_event_folder.as_path().exists() {
    std::fs::create_dir_all(dart_event_folder.as_path()).unwrap();
  }

  let dart_event_file_path =
    path_string_with_component(&dart_event_folder, vec!["dart_event.dart"]);
  // Ask cargo to re-run the build script when the generated file changes.
  println!("cargo:rerun-if-changed={}", dart_event_file_path);

  // Truncate and rewrite the whole file on every run.
  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(&dart_event_file_path)
  {
    Ok(ref mut file) => {
      file.write_all(render_result.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}, {:?}", dart_event_file_path, err);
    },
  }
}
// Header prepended to the generated dart_event.dart; ties the generated
// file into the Flutter `dispatch` library.
const DART_IMPORTED: &str = r#"
/// Auto generate. Do not edit
part of '../../dispatch.dart';
"#;
/// A crate that declares Dart events, discovered via its `Flowy.toml`.
#[derive(Debug)]
pub struct DartEventCrate {
  // Root directory of the crate.
  crate_path: PathBuf,
  // Paths (relative to `crate_path`) of the Rust files declaring events.
  event_files: Vec<String>,
}
impl DartEventCrate {
pub fn from_config(config: &CrateConfig) -> Self {
DartEventCrate {
crate_path: config.crate_path.clone(),
event_files: config.flowy_config.event_files.clone(),
}
}
}
/// Walks each root path and collects every crate that carries a
/// `Flowy.toml`, in discovery order.
pub fn parse_dart_event_files(crate_paths: Vec<String>) -> Vec<DartEventCrate> {
  crate_paths
    .iter()
    .flat_map(|path| {
      WalkDir::new(path)
        .into_iter()
        .filter_entry(|e| !is_hidden(e))
        .filter_map(|e| e.ok())
        .filter(is_crate_dir)
        .flat_map(|e| parse_crate_config_from(&e))
        .map(|config| DartEventCrate::from_config(&config))
        .collect::<Vec<DartEventCrate>>()
    })
    .collect()
}
/// Parses every configured event file of `event_crate` and returns one
/// [`EventASTContext`] per non-ignored event enum variant.
///
/// # Panics
/// Panics when a file cannot be read, fails to parse as Rust source, or
/// when attribute parsing reports errors.
pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
  event_crate
    .event_files
    .iter()
    .flat_map(|event_file| {
      let file_path =
        path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);
      let file_content = read_file(file_path.as_ref()).unwrap();
      let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
      ast
        .items
        .iter()
        .flat_map(|item| match item {
          Item::Enum(item_enum) => {
            let ast_result = ASTResult::new();
            let attrs = flowy_ast::enum_from_ast(
              &ast_result,
              &item_enum.ident,
              &item_enum.variants,
              &item_enum.attrs,
            );
            // Abort on any attribute-parsing error collected above.
            ast_result.check().unwrap();
            attrs
              .iter()
              // Variants flagged to ignore produce no Dart event.
              .filter(|attr| !attr.attrs.event_attrs.ignore)
              .enumerate()
              .map(|(_index, variant)| EventASTContext::from(&variant.attrs))
              .collect::<Vec<_>>()
          },
          _ => vec![],
        })
        .collect::<Vec<_>>()
    })
    .collect::<Vec<EventASTContext>>()
}
pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContext> {
ast
.iter()
.map(|event_ast| {
let input_deserializer = event_ast
.event_input
.as_ref()
.map(|event_input| event_input.get_ident().unwrap().to_string());
let output_deserializer = event_ast
.event_output
.as_ref()
.map(|event_output| event_output.get_ident().unwrap().to_string());
EventRenderContext {
input_deserializer,
output_deserializer,
error_deserializer: event_ast.event_error.clone(),
event: event_ast.event.to_string(),
event_ty: event_ast.event_ty.to_string(),
}
})
.collect::<Vec<EventRenderContext>>()
}

View File

@ -0,0 +1,65 @@
use crate::util::get_tera;
use tera::Context;
/// Renders one Dart event class from the `event_template.tera` template.
pub struct EventTemplate {
  // Accumulates template variables across `render` calls.
  tera_context: Context,
}
/// Per-event values substituted into the Dart event template.
pub struct EventRenderContext {
  /// Dart type of the request payload, or `None` for payload-less events.
  pub input_deserializer: Option<String>,
  /// Dart type of the response payload, or `None` for unit responses.
  pub output_deserializer: Option<String>,
  /// Dart type of the error payload.
  pub error_deserializer: String,
  /// Event variant name.
  pub event: String,
  /// Event enum name.
  pub event_ty: String,
}
#[allow(dead_code)]
impl EventTemplate {
  /// Creates a template with an empty rendering context.
  pub fn new() -> Self {
    Self {
      tera_context: Context::new(),
    }
  }

  /// Renders one Dart event class; returns `None` when rendering fails
  /// (the error is logged).
  pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
    self.tera_context.insert("index", &index);

    // e.g. event_ty=FolderEvent, event=CreateView:
    //   class name "FolderEventCreateView", enum ref "FolderEvent.CreateView".
    let event_class = format!("{}{}", ctx.event_ty, ctx.event);
    let event_name = format!("{}.{}", ctx.event_ty, ctx.event);
    self.tera_context.insert("event_class", &event_class);
    self.tera_context.insert("event", &event_name);

    // Missing payloads render as the Dart "Unit" placeholder.
    self
      .tera_context
      .insert("has_input", &ctx.input_deserializer.is_some());
    let input = ctx.input_deserializer.as_deref().unwrap_or("Unit");
    self.tera_context.insert("input_deserializer", input);

    self
      .tera_context
      .insert("has_output", &ctx.output_deserializer.is_some());
    let output = ctx.output_deserializer.as_deref().unwrap_or("Unit");
    self.tera_context.insert("output_deserializer", output);

    self
      .tera_context
      .insert("error_deserializer", &ctx.error_deserializer);

    let tera = get_tera("dart_event");
    match tera.render("event_template.tera", &self.tera_context) {
      Ok(rendered) => Some(rendered),
      Err(e) => {
        log::error!("{:?}", e);
        None
      },
    }
  }
}

View File

@ -0,0 +1,45 @@
class {{ event_class }} {
{%- if has_input %}
{{ input_deserializer }} request;
{{ event_class }}(this.request);
{%- else %}
{{ event_class }}();
{%- endif %}
Future<Either<{{ output_deserializer }}, {{ error_deserializer }}>> send() {
{%- if has_input %}
final request = FFIRequest.create()
..event = {{ event }}.toString()
..payload = requestToBytes(this.request);
return Dispatch.asyncRequest(request)
.then((bytesResult) => bytesResult.fold(
{%- if has_output %}
(okBytes) => left({{ output_deserializer }}.fromBuffer(okBytes)),
{%- else %}
(bytes) => left(unit),
{%- endif %}
(errBytes) => right({{ error_deserializer }}.fromBuffer(errBytes)),
));
{%- else %}
final request = FFIRequest.create()
..event = {{ event }}.toString();
return Dispatch.asyncRequest(request).then((bytesResult) => bytesResult.fold(
{%- if has_output %}
(okBytes) => left({{ output_deserializer }}.fromBuffer(okBytes)),
{%- else %}
(bytes) => left(unit),
{%- endif %}
(errBytes) => right({{ error_deserializer }}.fromBuffer(errBytes)),
));
{%- endif %}
}
}

View File

@ -0,0 +1,6 @@
#![allow(clippy::module_inception)]
mod dart_event;
mod event_template;
pub use dart_event::*;

View File

@ -0,0 +1,68 @@
use std::fs;
use std::path::{Path, PathBuf};
/// Per-crate code-generation settings, deserialized from `Flowy.toml`.
#[derive(serde::Deserialize, Clone, Debug)]
pub struct FlowyConfig {
  /// Rust files (relative to the crate root) declaring event enums.
  #[serde(default)]
  pub event_files: Vec<String>,
  // Collect AST from the file or directory specified by proto_input to generate the proto files.
  #[serde(default)]
  pub proto_input: Vec<String>,
  // Output path for the generated proto files. The default value is default_proto_output()
  #[serde(default = "default_proto_output")]
  pub proto_output: String,
  // Create a crate that stores the generated protobuf Rust structures. The default value is default_protobuf_crate()
  #[serde(default = "default_protobuf_crate")]
  pub protobuf_crate_path: String,
}
/// Default directory for generated `.proto` files: `resources/proto`.
fn default_proto_output() -> String {
  Path::new("resources")
    .join("proto")
    .to_str()
    .unwrap()
    .to_owned()
}
/// Default directory for generated protobuf Rust code: `src/protobuf`.
fn default_protobuf_crate() -> String {
  Path::new("src")
    .join("protobuf")
    .to_str()
    .unwrap()
    .to_owned()
}
impl FlowyConfig {
  /// Reads and deserializes a `Flowy.toml` file.
  ///
  /// # Panics
  /// Panics (with the offending path) when the file cannot be read or is
  /// not valid TOML — this runs at build time, so failing loudly is fine.
  pub fn from_toml_file(path: &Path) -> Self {
    let content = fs::read_to_string(path)
      .unwrap_or_else(|e| panic!("Failed to read Flowy.toml at {:?}: {}", path, e));
    toml::from_str(content.as_ref())
      .unwrap_or_else(|e| panic!("Failed to parse Flowy.toml at {:?}: {}", path, e))
  }
}
/// A discovered crate together with its parsed `Flowy.toml`.
pub struct CrateConfig {
  /// Root directory of the crate.
  pub crate_path: PathBuf,
  /// Directory name of the crate (file stem of `crate_path`).
  pub crate_folder: String,
  /// Parsed code-generation settings.
  pub flowy_config: FlowyConfig,
}
/// Reads the crate configuration next to the walked `Cargo.toml` entry.
/// Returns `None` when the crate has no `Flowy.toml`.
pub fn parse_crate_config_from(entry: &walkdir::DirEntry) -> Option<CrateConfig> {
  let crate_path = entry.path().parent().unwrap().to_path_buf();
  let config_path = crate_path.join("Flowy.toml");
  if !config_path.exists() {
    return None;
  }

  let flowy_config = FlowyConfig::from_toml_file(&config_path);
  let crate_folder = crate_path
    .file_stem()
    .unwrap()
    .to_str()
    .unwrap()
    .to_string();

  Some(CrateConfig {
    crate_path,
    crate_folder,
    flowy_config,
  })
}

View File

@ -0,0 +1,21 @@
#[cfg(feature = "proto_gen")]
pub mod protobuf_file;
#[cfg(feature = "dart_event")]
pub mod dart_event;
#[cfg(feature = "ts_event")]
pub mod ts_event;
#[cfg(any(feature = "proto_gen", feature = "dart_event", feature = "ts_event"))]
mod flowy_toml;
pub(crate) mod ast;
#[cfg(any(feature = "proto_gen", feature = "dart_event", feature = "ts_event"))]
pub mod util;
/// Names of all generated protobuf messages/enums, serialized to a
/// per-crate cache file by the proto generator.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct ProtoCache {
  pub structs: Vec<String>,
  pub enums: Vec<String>,
}

View File

@ -0,0 +1,203 @@
#![allow(unused_attributes)]
#![allow(dead_code)]
#![allow(unused_imports)]
#![allow(unused_results)]
use crate::protobuf_file::template::{EnumTemplate, StructTemplate, RUST_TYPE_MAP};
use crate::protobuf_file::{parse_crate_info_from_path, ProtoFile, ProtobufCrateContext};
use crate::util::*;
use fancy_regex::Regex;
use flowy_ast::*;
use lazy_static::lazy_static;
use std::path::PathBuf;
use std::{fs::File, io::Read, path::Path};
use syn::Item;
use walkdir::WalkDir;
/// Discovers every protobuf-enabled crate under the given paths and parses
/// its configured inputs into [`ProtoFile`]s.
pub fn parse_protobuf_context_from(crate_paths: Vec<String>) -> Vec<ProtobufCrateContext> {
  parse_crate_info_from_path(crate_paths)
    .into_iter()
    .map(|crate_info| {
      let output_path = crate_info.proto_output_path();
      let proto_files: Vec<ProtoFile> = crate_info
        .proto_input_paths()
        .iter()
        .flat_map(|input_path| parse_files_protobuf(input_path, &output_path))
        .collect();
      ProtobufCrateContext::from_crate_info(crate_info, proto_files)
    })
    .collect()
}
/// Parses every Rust source file under `proto_crate_path` and derives one
/// [`ProtoFile`] per source file that declares pb structs or enums.
fn parse_files_protobuf(proto_crate_path: &Path, proto_output_path: &Path) -> Vec<ProtoFile> {
  let mut gen_proto_vec: Vec<ProtoFile> = vec![];
  // file_stem https://doc.rust-lang.org/std/path/struct.Path.html#method.file_stem
  for (path, file_name) in WalkDir::new(proto_crate_path)
    .into_iter()
    .filter_entry(|e| !is_hidden(e))
    .filter_map(|e| e.ok())
    .filter(|e| !e.file_type().is_dir())
    .map(|e| {
      let path = e.path().to_str().unwrap().to_string();
      let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
      (path, file_name)
    })
  {
    // Module declaration files carry no type definitions.
    if file_name == "mod" {
      continue;
    }

    // https://docs.rs/syn/1.0.54/syn/struct.File.html
    let ast = syn::parse_file(read_file(&path).unwrap().as_ref())
      .unwrap_or_else(|_| panic!("Unable to parse file at {}", path));
    let structs = get_ast_structs(&ast);
    let proto_file = format!("{}.proto", &file_name);
    let proto_file_path = path_string_with_component(proto_output_path, vec![&proto_file]);
    // Reuse the syntax declaration of an already-generated proto file.
    let proto_syntax = find_proto_syntax(proto_file_path.as_ref());

    let mut proto_content = String::new();

    // The types that are not defined in the current file.
    let mut ref_types: Vec<String> = vec![];

    // Render one proto message per struct; only pb(index)-annotated fields
    // become message fields.
    structs.iter().for_each(|s| {
      let mut struct_template = StructTemplate::new();
      struct_template.set_message_struct_name(&s.name);
      s.fields
        .iter()
        .filter(|field| field.pb_attrs.pb_index().is_some())
        .for_each(|field| {
          ref_types.push(field.ty_as_str());
          struct_template.set_field(field);
        });
      let s = struct_template.render().unwrap();
      proto_content.push_str(s.as_ref());
      proto_content.push('\n');
    });

    // Render one proto enum per Rust enum.
    let enums = get_ast_enums(&ast);
    enums.iter().for_each(|e| {
      let mut enum_template = EnumTemplate::new();
      enum_template.set_message_enum(e);
      let s = enum_template.render().unwrap();
      proto_content.push_str(s.as_ref());
      ref_types.push(e.name.clone());
      proto_content.push('\n');
    });

    if !enums.is_empty() || !structs.is_empty() {
      let structs: Vec<String> = structs.iter().map(|s| s.name.clone()).collect();
      let enums: Vec<String> = enums.iter().map(|e| e.name.clone()).collect();
      // Locally-defined names are not external references.
      ref_types.retain(|s| !structs.contains(s));
      ref_types.retain(|s| !enums.contains(s));

      let info = ProtoFile {
        file_path: path.clone(),
        file_name: file_name.clone(),
        ref_types,
        structs,
        enums,
        syntax: proto_syntax,
        content: proto_content,
      };
      gen_proto_vec.push(info);
    }
  }

  gen_proto_vec
}
/// Collects every struct in the parsed file that has at least one
/// `pb(index = …)`-annotated field.
///
/// # Panics
/// Panics when attribute parsing reports errors.
pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
  let ast_result = ASTResult::new();
  let mut proto_structs: Vec<Struct> = vec![];
  ast.items.iter().for_each(|item| {
    if let Item::Struct(item_struct) = item {
      let (_, fields) = struct_from_ast(&ast_result, &item_struct.fields);

      // `any` short-circuits; no need to count all indexed fields.
      if fields.iter().any(|f| f.pb_attrs.pb_index().is_some()) {
        proto_structs.push(Struct {
          name: item_struct.ident.to_string(),
          fields,
        });
      }
    }
  });
  ast_result.check().unwrap();
  proto_structs
}
/// Collects every enum in the parsed file for proto generation.
///
/// # Panics
/// Panics when attribute parsing reports errors.
pub fn get_ast_enums(ast: &syn::File) -> Vec<FlowyEnum> {
  let mut flowy_enums: Vec<FlowyEnum> = vec![];
  let ast_result = ASTResult::new();
  ast.items.iter().for_each(|item| {
    // https://docs.rs/syn/1.0.54/syn/enum.Item.html
    if let Item::Enum(item_enum) = item {
      let attrs = flowy_ast::enum_from_ast(
        &ast_result,
        &item_enum.ident,
        &item_enum.variants,
        // Pass the enum's own attributes (not the file-level `ast.attrs`),
        // matching the sibling call in the dart-event parser.
        &item_enum.attrs,
      );
      flowy_enums.push(FlowyEnum {
        name: item_enum.ident.to_string(),
        attrs,
      });
    }
  });
  ast_result.check().unwrap();
  flowy_enums
}
/// An enum extracted from a source file, ready for proto-enum rendering.
pub struct FlowyEnum<'a> {
  pub name: String,
  pub attrs: Vec<ASTEnumVariant<'a>>,
}
/// A struct extracted from a source file, ready for proto-message rendering.
pub struct Struct<'a> {
  pub name: String,
  pub fields: Vec<ASTField<'a>>,
}
lazy_static! {
  // Matches an existing `syntax = "...";` declaration in a proto file.
  static ref SYNTAX_REGEX: Regex = Regex::new("syntax.*;").unwrap();
  // static ref IMPORT_REGEX: Regex = Regex::new("(import\\s).*;").unwrap();
}
/// Returns the `syntax = "...";` line (newline-terminated) of the proto file
/// at `path`, or the proto3 default when the file does not exist yet.
fn find_proto_syntax(path: &str) -> String {
  if !Path::new(path).exists() {
    return String::from("syntax = \"proto3\";\n");
  }

  let mut content = String::new();
  File::open(path).unwrap().read_to_string(&mut content).unwrap();

  let mut result = String::new();
  for line in content.lines() {
    // `find` yields Result<Option<Match>>; only append on a real match.
    if let Ok(Some(m)) = SYNTAX_REGEX.find(line) {
      result.push_str(m.as_str());
    }
  }
  result.push('\n');
  result
}

View File

@ -0,0 +1,300 @@
#![allow(unused_imports)]
#![allow(unused_attributes)]
#![allow(dead_code)]
mod ast;
mod proto_gen;
mod proto_info;
mod template;
use crate::util::path_string_with_component;
use itertools::Itertools;
use log::info;
pub use proto_gen::*;
pub use proto_info::*;
use std::fs::File;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::process::Command;
use walkdir::WalkDir;
/// Generates `.proto` files for the current crate, then compiles them into
/// Rust (always) and Dart/TypeScript (feature-gated) protobuf sources.
pub fn gen(crate_name: &str) {
  let crate_path = std::fs::canonicalize(".")
    .unwrap()
    .as_path()
    .display()
    .to_string();
  // 1. generate the proto files to proto_file_dir
  #[cfg(feature = "proto_gen")]
  let proto_crates = gen_proto_files(crate_name, &crate_path);

  for proto_crate in proto_crates {
    let mut proto_file_paths = vec![];
    let mut file_names = vec![];
    let proto_file_output_path = proto_crate
      .proto_output_path()
      .to_str()
      .unwrap()
      .to_string();
    let protobuf_output_path = proto_crate
      .protobuf_crate_path()
      .to_str()
      .unwrap()
      .to_string();

    // Collect every generated .proto under the crate's proto output dir.
    for (path, file_name) in WalkDir::new(&proto_file_output_path)
      .into_iter()
      .filter_map(|e| e.ok())
      .map(|e| {
        let path = e.path().to_str().unwrap().to_string();
        let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
        (path, file_name)
      })
    {
      if path.ends_with(".proto") {
        // https://stackoverflow.com/questions/49077147/how-can-i-force-build-rs-to-run-again-without-cleaning-my-whole-project
        println!("cargo:rerun-if-changed={}", path);
        proto_file_paths.push(path);
        file_names.push(file_name);
      }
    }
    // Use the vendored protoc binary so no system install is required.
    let protoc_bin_path = protoc_bin_vendored::protoc_bin_path().unwrap();

    // 2. generate the protobuf files(Dart)
    #[cfg(feature = "dart")]
    generate_dart_protobuf_files(
      crate_name,
      &proto_file_output_path,
      &proto_file_paths,
      &file_names,
      &protoc_bin_path,
    );

    #[cfg(feature = "ts")]
    generate_ts_protobuf_files(
      crate_name,
      &proto_file_output_path,
      &proto_file_paths,
      &file_names,
      &protoc_bin_path,
    );

    // 3. generate the protobuf files(Rust)
    generate_rust_protobuf_files(
      &protoc_bin_path,
      &proto_file_paths,
      &proto_file_output_path,
      &protobuf_output_path,
    );
  }
}
/// Compiles the given proto files into Rust sources under
/// `protobuf_output_path` using the vendored `protoc`.
///
/// # Panics
/// Panics when `protoc` fails.
fn generate_rust_protobuf_files(
  protoc_bin_path: &Path,
  proto_file_paths: &[String],
  proto_file_output_path: &str,
  protobuf_output_path: &str,
) {
  protoc_rust::Codegen::new()
    .out_dir(protobuf_output_path)
    .protoc_path(protoc_bin_path)
    .inputs(proto_file_paths)
    .include(proto_file_output_path)
    .run()
    .expect("Running rust protoc failed.");
}
/// Compiles the proto files into TypeScript under the Tauri backend-service
/// model directory for `name`, then writes an `index.ts` re-exporting them.
///
/// # Panics
/// Panics when `protoc` fails or the index file cannot be written.
#[cfg(feature = "ts")]
fn generate_ts_protobuf_files(
  name: &str,
  proto_file_output_path: &str,
  paths: &[String],
  file_names: &Vec<String>,
  protoc_bin_path: &Path,
) {
  // Fall back to repo-relative defaults when env vars are unset.
  let root = std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap_or("../../".to_string());
  let tauri_backend_service_path = std::env::var("TAURI_BACKEND_SERVICE_PATH")
    .unwrap_or("appflowy_tauri/src/services/backend".to_string());

  let mut output = PathBuf::new();
  output.push(root);
  output.push(tauri_backend_service_path);
  output.push("models");
  output.push(name);

  if !output.as_path().exists() {
    std::fs::create_dir_all(&output).unwrap();
  }
  let protoc_bin_path = protoc_bin_path.to_str().unwrap().to_owned();
  paths.iter().for_each(|path| {
    // if let Err(err) = Command::new(protoc_bin_path.clone())
    //   .arg(format!("--ts_out={}", output.to_str().unwrap()))
    //   .arg(format!("--proto_path={}", proto_file_output_path))
    //   .arg(path)
    //   .spawn()
    // {
    //   panic!("Generate ts pb file failed: {}, {:?}", path, err);
    // }
    let result = cmd_lib::run_cmd! {
      ${protoc_bin_path} --ts_out=${output} --proto_path=${proto_file_output_path} ${path}
    };
    if result.is_err() {
      panic!("Generate ts pb file failed with: {}, {:?}", path, result)
    };
  });

  // Rewrite index.ts so every generated model is re-exported.
  let ts_index = path_string_with_component(&output, vec!["index.ts"]);
  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(&ts_index)
  {
    Ok(ref mut file) => {
      let mut export = String::new();
      export.push_str("// Auto-generated, do not edit \n");
      for file_name in file_names {
        let c = format!("export * from \"./{}\";\n", file_name);
        export.push_str(c.as_ref());
      }

      file.write_all(export.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}", err);
    },
  }
}
/// Compiles the proto files into Dart under the Flutter SDK's
/// `lib/protobuf/<name>/` directory, then writes a `protobuf.dart`
/// aggregation file. Returns early when the required env vars are unset.
///
/// # Panics
/// Panics when the `protoc-gen-dart` plugin is missing, `protoc` fails, or
/// the aggregation file cannot be written.
#[cfg(feature = "dart")]
fn generate_dart_protobuf_files(
  name: &str,
  proto_file_output_path: &str,
  paths: &[String],
  file_names: &Vec<String>,
  protoc_bin_path: &Path,
) {
  if std::env::var("CARGO_MAKE_WORKING_DIRECTORY").is_err() {
    log::error!("CARGO_MAKE_WORKING_DIRECTORY was not set, skip generate dart pb");
    return;
  }

  if std::env::var("FLUTTER_FLOWY_SDK_PATH").is_err() {
    log::error!("FLUTTER_FLOWY_SDK_PATH was not set, skip generate dart pb");
    return;
  }
  let mut output = PathBuf::new();
  output.push(std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap());
  output.push(std::env::var("FLUTTER_FLOWY_SDK_PATH").unwrap());
  output.push("lib");
  output.push("protobuf");
  output.push(name);

  if !output.as_path().exists() {
    std::fs::create_dir_all(&output).unwrap();
  }
  // protoc needs the protoc-gen-dart plugin on $PATH; fail early if absent.
  check_pb_dart_plugin();
  let protoc_bin_path = protoc_bin_path.to_str().unwrap().to_owned();
  paths.iter().for_each(|path| {
    let result = cmd_lib::run_cmd! {
      ${protoc_bin_path} --dart_out=${output} --proto_path=${proto_file_output_path} ${path}
    };
    if result.is_err() {
      panic!("Generate dart pb file failed with: {}, {:?}", path, result)
    };
  });

  // Rewrite protobuf.dart so every generated .pb.dart is exported.
  let protobuf_dart = path_string_with_component(&output, vec!["protobuf.dart"]);
  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(Path::new(&protobuf_dart))
  {
    Ok(ref mut file) => {
      let mut export = String::new();
      export.push_str("// Auto-generated, do not edit \n");
      for file_name in file_names {
        let c = format!("export './{}.pb.dart';\n", file_name);
        export.push_str(c.as_ref());
      }

      file.write_all(export.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}", err);
    },
  }
}
/// Verifies that the `protoc-gen-dart` plugin is reachable via `$PATH`.
///
/// On non-Windows platforms this panics with installation guidance when the
/// plugin is missing; on Windows the check is currently a no-op.
pub fn check_pb_dart_plugin() {
  if cfg!(target_os = "windows") {
    //Command::new("cmd")
    //  .arg("/C")
    //  .arg(cmd)
    //  .status()
    //  .expect("failed to execute process");
    //panic!("{}", format!("\n❌ The protoc-gen-dart was not installed correctly."))
  } else {
    let exit_result = Command::new("sh")
      .arg("-c")
      .arg("command -v protoc-gen-dart")
      .status()
      .expect("failed to execute process");
    if !exit_result.success() {
      // Build a diagnostic listing each $PATH entry plus a suggested fix.
      let mut msg = "\n❌ Can't find protoc-gen-dart in $PATH:\n".to_string();
      let output = Command::new("sh").arg("-c").arg("echo $PATH").output();
      let paths = String::from_utf8(output.unwrap().stdout)
        .unwrap()
        .split(':')
        .map(|s| s.to_string())
        .collect::<Vec<String>>();

      paths.iter().for_each(|s| msg.push_str(&format!("{}\n", s)));

      if let Ok(output) = Command::new("sh")
        .arg("-c")
        .arg("which protoc-gen-dart")
        .output()
      {
        msg.push_str(&format!(
          "Installed protoc-gen-dart path: {:?}\n",
          String::from_utf8(output.stdout).unwrap()
        ));
      }

      msg.push_str("✅ You can fix that by adding:");
      msg.push_str("\n\texport PATH=\"$PATH\":\"$HOME/.pub-cache/bin\"\n");
      msg.push_str("to your shell's config file.(.bashrc, .bash, .profile, .zshrc etc.)");
      panic!("{}", msg)
    }
  }
}
/// Runs the proto generator for `crate_name` and returns the crate infos,
/// registering every source file with cargo's change detection.
#[cfg(feature = "proto_gen")]
fn gen_proto_files(crate_name: &str, crate_path: &str) -> Vec<ProtobufCrate> {
  let crate_contexts = ProtoGenerator::gen(crate_name, crate_path);

  let mut proto_crates = Vec::with_capacity(crate_contexts.len());
  for context in &crate_contexts {
    proto_crates.push(context.protobuf_crate.clone());
  }

  // Re-run the build script when any contributing source file changes.
  for file in crate_contexts.into_iter().flat_map(|context| context.files) {
    println!("cargo:rerun-if-changed={}", file.file_path);
  }

  proto_crates
}

View File

@ -0,0 +1,173 @@
#![allow(unused_attributes)]
#![allow(dead_code)]
#![allow(unused_imports)]
#![allow(unused_results)]
use crate::protobuf_file::ast::parse_protobuf_context_from;
use crate::protobuf_file::proto_info::ProtobufCrateContext;
use crate::protobuf_file::ProtoFile;
use crate::util::*;
use crate::ProtoCache;
use std::collections::HashMap;
use std::fs::File;
use std::path::Path;
use std::{fs::OpenOptions, io::Write};
/// Namespace type for the proto-generation pipeline.
pub struct ProtoGenerator();
impl ProtoGenerator {
  /// Parses the crates under `crate_path`, writes their `.proto` files and
  /// Rust `mod.rs`, and caches the generated symbol names in the build
  /// cache directory keyed by `crate_name`.
  ///
  /// # Panics
  /// Panics when the cache directory or cache file cannot be written.
  pub fn gen(crate_name: &str, crate_path: &str) -> Vec<ProtobufCrateContext> {
    let crate_contexts = parse_protobuf_context_from(vec![crate_path.to_owned()]);
    write_proto_files(&crate_contexts);
    write_rust_crate_mod_file(&crate_contexts);

    // Persist struct/enum names so other crates can resolve references.
    let proto_cache = ProtoCache::from_crate_contexts(&crate_contexts);
    let proto_cache_str = serde_json::to_string(&proto_cache).unwrap();
    let crate_cache_dir = path_buf_with_component(&cache_dir(), vec![crate_name]);
    if !crate_cache_dir.as_path().exists() {
      std::fs::create_dir_all(&crate_cache_dir).unwrap();
    }

    let protobuf_cache_path = path_string_with_component(&crate_cache_dir, vec!["proto_cache"]);
    match std::fs::OpenOptions::new()
      .create(true)
      .write(true)
      .append(false)
      .truncate(true)
      .open(&protobuf_cache_path)
    {
      Ok(ref mut file) => {
        file.write_all(proto_cache_str.as_bytes()).unwrap();
        File::flush(file).unwrap();
      },
      Err(_err) => {
        panic!("Failed to open file: {}", protobuf_cache_path);
      },
    }

    crate_contexts
  }
}
/// Writes every crate's `.proto` files, resolving cross-file imports.
fn write_proto_files(crate_contexts: &[ProtobufCrateContext]) {
  // Index each proto file's declared symbols by file path across all crates
  // in a single pass (the previous version built and discarded an
  // intermediate HashMap per crate context).
  let file_path_content_map: HashMap<String, ProtoFileSymbol> = crate_contexts
    .iter()
    .flat_map(|ctx| ctx.files.iter())
    .map(|file| {
      (
        file.file_path.clone(),
        ProtoFileSymbol {
          file_name: file.file_name.clone(),
          symbols: file.symbols(),
        },
      )
    })
    .collect();

  for context in crate_contexts {
    let dir = context.protobuf_crate.proto_output_path();
    context.files.iter().for_each(|file| {
      // Assemble the file: syntax declaration, imports, then messages.
      let mut file_content = file.syntax.clone();
      file_content.push_str(&gen_import_content(file, &file_path_content_map));
      file_content.push_str(&file.content);
      let proto_file = format!("{}.proto", &file.file_name);
      let proto_file_path = path_string_with_component(&dir, vec![&proto_file]);
      save_content_to_file_with_diff_prompt(&file_content, proto_file_path.as_ref());
    });
  }
}
/// Renders the `import "....proto";` lines required by `current_file`: one
/// per other file that declares a symbol `current_file` references.
fn gen_import_content(
  current_file: &ProtoFile,
  file_path_symbols_map: &HashMap<String, ProtoFileSymbol>,
) -> String {
  let mut import_files: Vec<String> = vec![];
  file_path_symbols_map
    .iter()
    .for_each(|(file_path, proto_file_symbols)| {
      if file_path != &current_file.file_path {
        current_file.ref_types.iter().for_each(|ref_type| {
          if proto_file_symbols.symbols.contains(ref_type) {
            let import_file = format!("import \"{}.proto\";", proto_file_symbols.file_name);
            if !import_files.contains(&import_file) {
              import_files.push(import_file);
            }
          }
        });
      }
    });

  // HashMap iteration order is unspecified; sort so the generated file is
  // deterministic and the save-with-diff step sees no spurious changes.
  import_files.sort();

  // Always newline-terminate the import section so the message content
  // starts on its own line. The previous code appended the newline only
  // for a single import, gluing the last of several imports onto the
  // first message line.
  if import_files.is_empty() {
    String::new()
  } else {
    format!("{}\n", import_files.join("\n"))
  }
}
/// The symbols (struct and enum names) declared by one proto file.
struct ProtoFileSymbol {
  file_name: String,
  symbols: Vec<String>,
}
/// Writes each crate's protobuf `mod.rs`, declaring and re-exporting one
/// module per generated proto file.
///
/// # Panics
/// Panics when the mod file cannot be opened or written.
fn write_rust_crate_mod_file(crate_contexts: &[ProtobufCrateContext]) {
  for context in crate_contexts {
    let mod_path = context.protobuf_crate.proto_model_mod_file();
    match OpenOptions::new()
      .create(true)
      .write(true)
      .append(false)
      .truncate(true)
      .open(&mod_path)
    {
      Ok(ref mut file) => {
        let mut mod_file_content = String::new();

        mod_file_content.push_str("#![cfg_attr(rustfmt, rustfmt::skip)]\n");
        // NOTE(review): the leading space before this attribute looks accidental — confirm.
        mod_file_content.push_str(" #![allow(ambiguous_glob_reexports)]\n");
        mod_file_content.push_str("// Auto-generated, do not edit\n");

        // One `mod x; pub use x::*;` pair per generated proto file.
        walk_dir(
          context.protobuf_crate.proto_output_path(),
          |e| !e.file_type().is_dir(),
          |_, name| {
            let c = format!("\nmod {};\npub use {}::*;\n", &name, &name);
            mod_file_content.push_str(c.as_ref());
          },
        );
        file.write_all(mod_file_content.as_bytes()).unwrap();
      },
      Err(err) => {
        panic!("Failed to open file: {}", err);
      },
    }
  }
}
impl ProtoCache {
  /// Gathers every generated struct and enum name across all crates.
  fn from_crate_contexts(crate_contexts: &[ProtobufCrateContext]) -> Self {
    let mut structs: Vec<String> = Vec::new();
    let mut enums: Vec<String> = Vec::new();
    for file in crate_contexts.iter().flat_map(|crate_info| crate_info.files.iter()) {
      structs.extend(file.structs.iter().cloned());
      enums.extend(file.enums.iter().cloned());
    }
    Self { structs, enums }
  }
}

View File

@ -0,0 +1,149 @@
#![allow(dead_code)]
use crate::flowy_toml::{parse_crate_config_from, CrateConfig, FlowyConfig};
use crate::util::*;
use std::fs::OpenOptions;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use walkdir::WalkDir;
/// A protobuf-enabled crate together with its parsed proto files.
#[derive(Debug)]
pub struct ProtobufCrateContext {
  pub files: Vec<ProtoFile>,
  pub protobuf_crate: ProtobufCrate,
}
impl ProtobufCrateContext {
  /// Associates the parsed proto files with their owning crate info.
  pub fn from_crate_info(inner: ProtobufCrate, files: Vec<ProtoFile>) -> Self {
    Self {
      files,
      protobuf_crate: inner,
    }
  }

  /// Writes the crate's protobuf `mod.rs` re-exporting the generated model.
  ///
  /// # Panics
  /// Panics when the mod file cannot be opened or written.
  pub fn create_crate_mod_file(&self) {
    // mod model;
    // pub use model::*;
    let mod_file_path =
      path_string_with_component(&self.protobuf_crate.protobuf_crate_path(), vec!["mod.rs"]);
    let mut content = "#![cfg_attr(rustfmt, rustfmt::skip)]\n".to_owned();
    // NOTE(review): the leading space before this attribute looks accidental — confirm.
    content.push_str(" #![allow(ambiguous_glob_reexports)]\n");
    content.push_str("// Auto-generated, do not edit\n");
    content.push_str("mod model;\npub use model::*;");
    match OpenOptions::new()
      .create(true)
      .write(true)
      .append(false)
      .truncate(true)
      .open(Path::new(&mod_file_path))
    {
      Ok(ref mut file) => {
        file.write_all(content.as_bytes()).unwrap();
      },
      Err(err) => {
        panic!("Failed to open protobuf mod file: {}", err);
      },
    }
  }

  /// Flutter module directory for this crate under `root`.
  #[allow(dead_code)]
  pub fn flutter_mod_dir(&self, root: &str) -> String {
    let crate_module_dir = format!("{}/{}", root, self.protobuf_crate.crate_folder);
    crate_module_dir
  }

  /// Path of this crate's `protobuf.dart` aggregation file under `root`.
  #[allow(dead_code)]
  pub fn flutter_mod_file(&self, root: &str) -> String {
    let crate_module_dir = format!(
      "{}/{}/protobuf.dart",
      root, self.protobuf_crate.crate_folder
    );
    crate_module_dir
  }
}
/// A crate that participates in protobuf generation, discovered via its
/// `Flowy.toml`.
#[derive(Clone, Debug)]
pub struct ProtobufCrate {
  /// Directory name of the crate.
  pub crate_folder: String,
  /// Root directory of the crate.
  pub crate_path: PathBuf,
  // Parsed `Flowy.toml` settings driving the input/output paths below.
  flowy_config: FlowyConfig,
}
impl ProtobufCrate {
  /// Builds the crate info from a discovered crate configuration.
  pub fn from_config(config: CrateConfig) -> Self {
    ProtobufCrate {
      crate_path: config.crate_path,
      crate_folder: config.crate_folder,
      flowy_config: config.flowy_config,
    }
  }

  /// Return the file paths for each rust file that used to generate the proto file.
  pub fn proto_input_paths(&self) -> Vec<PathBuf> {
    self
      .flowy_config
      .proto_input
      .iter()
      .map(|name| path_buf_with_component(&self.crate_path, vec![name]))
      .collect::<Vec<PathBuf>>()
  }

  /// The protobuf_crate_path is used to store the generated protobuf Rust structures.
  /// Creates the directory when it does not exist yet.
  pub fn protobuf_crate_path(&self) -> PathBuf {
    let crate_path = PathBuf::from(&self.flowy_config.protobuf_crate_path);
    create_dir_if_not_exist(&crate_path);
    crate_path
  }

  /// The proto_output_path is used to store the proto files.
  /// Creates the directory when it does not exist yet.
  pub fn proto_output_path(&self) -> PathBuf {
    let output_dir = PathBuf::from(&self.flowy_config.proto_output);
    create_dir_if_not_exist(&output_dir);
    output_dir
  }

  /// Path of the generated protobuf module's `mod.rs`.
  pub fn proto_model_mod_file(&self) -> String {
    path_string_with_component(&self.protobuf_crate_path(), vec!["mod.rs"])
  }
}
#[derive(Debug)]
/// One generated `.proto` file together with the symbols it declares.
pub struct ProtoFile {
  /// Full path of the generated file.
  pub file_path: String,
  /// File name without directory.
  pub file_name: String,
  /// Protobuf message names declared in this file.
  pub structs: Vec<String>,
  // store the type of current file using
  pub ref_types: Vec<String>,
  /// Protobuf enum names declared in this file.
  pub enums: Vec<String>,
  // proto syntax. "proto3" or "proto2"
  pub syntax: String,
  // proto message content
  pub content: String,
}
impl ProtoFile {
  /// All symbol names declared in this file: messages first, then enums.
  pub fn symbols(&self) -> Vec<String> {
    self
      .structs
      .iter()
      .chain(self.enums.iter())
      .cloned()
      .collect()
  }
}
/// Walks each root directory and collects every crate that carries a
/// protobuf code-generation configuration (a parsable `Cargo.toml`/Flowy.toml).
pub fn parse_crate_info_from_path(roots: Vec<String>) -> Vec<ProtobufCrate> {
  roots
    .iter()
    .flat_map(|root| {
      WalkDir::new(root)
        .into_iter()
        // Skip dot-directories entirely.
        .filter_entry(|e| !is_hidden(e))
        .filter_map(|e| e.ok())
        .filter(is_crate_dir)
        .flat_map(|e| parse_crate_config_from(&e))
        .map(ProtobufCrate::from_config)
    })
    .collect()
}

View File

@ -0,0 +1,35 @@
use crate::util::get_tera;
use itertools::Itertools;
use tera::Context;
/// Renders the `derive_meta.tera` template that maps Rust type names to
/// protobuf type categories.
pub struct ProtobufDeriveMeta {
  // Tera rendering context shared across renders.
  context: Context,
  // Protobuf message (struct) names.
  structs: Vec<String>,
  // Protobuf enum names (deduplicated in `new`).
  enums: Vec<String>,
}
#[allow(dead_code)]
impl ProtobufDeriveMeta {
pub fn new(structs: Vec<String>, enums: Vec<String>) -> Self {
let enums: Vec<_> = enums.into_iter().unique().collect();
ProtobufDeriveMeta {
context: Context::new(),
structs,
enums,
}
}
pub fn render(&mut self) -> Option<String> {
self.context.insert("names", &self.structs);
self.context.insert("enums", &self.enums);
let tera = get_tera("protobuf_file/template/derive_meta");
match tera.render("derive_meta.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
},
}
}
}

View File

@ -0,0 +1,45 @@
{#- Generates `category_from_str`: maps a Rust type name to its protobuf TypeCategory. -#}
#![cfg_attr(rustfmt, rustfmt::skip)]
pub enum TypeCategory {
Array,
Map,
Str,
Protobuf,
Bytes,
Enum,
Opt,
Primitive,
}
// auto generate, do not edit
pub fn category_from_str(type_str: &str) -> TypeCategory {
match type_str {
"Vec" => TypeCategory::Array,
"HashMap" => TypeCategory::Map,
"u8" => TypeCategory::Bytes,
"String" => TypeCategory::Str,
{%- for name in names -%}
{%- if loop.first %}
"{{ name }}"
{%- else %}
| "{{ name }}"
{%- endif -%}
{%- if loop.last %}
=> TypeCategory::Protobuf,
{%- endif %}
{%- endfor %}
{%- for enum in enums -%}
{%- if loop.first %}
"{{ enum }}"
{%- else %}
| "{{ enum }}"
{%- endif -%}
{%- if loop.last %}
=> TypeCategory::Enum,
{%- endif %}
{%- endfor %}
"Option" => TypeCategory::Opt,
_ => TypeCategory::Primitive,
}
}

View File

@ -0,0 +1,4 @@
#![allow(clippy::module_inception)]
mod derive_meta;
pub use derive_meta::*;

View File

@ -0,0 +1,5 @@
mod derive_meta;
mod proto_file;
pub use derive_meta::*;
pub use proto_file::*;

View File

@ -0,0 +1,5 @@
{#- Renders one protobuf enum definition: `enum Name { Item = N; ... }`. -#}
enum {{ enum_name }} {
{%- for item in items %}
{{ item }}
{%- endfor %}
}

View File

@ -0,0 +1,40 @@
use crate::protobuf_file::ast::FlowyEnum;
use crate::util::get_tera;
use tera::Context;
/// Renders the `enum.tera` template into a protobuf `enum` definition.
pub struct EnumTemplate {
  // Tera rendering context.
  context: Context,
  // Rendered `Name = value;` lines, one per enum variant.
  items: Vec<String>,
}
#[allow(dead_code)]
impl EnumTemplate {
pub fn new() -> Self {
EnumTemplate {
context: Context::new(),
items: vec![],
}
}
pub fn set_message_enum(&mut self, flowy_enum: &FlowyEnum) {
self.context.insert("enum_name", &flowy_enum.name);
flowy_enum.attrs.iter().for_each(|item| {
self.items.push(format!(
"{} = {};",
item.attrs.enum_item_name, item.attrs.value
))
})
}
pub fn render(&mut self) -> Option<String> {
self.context.insert("items", &self.items);
let tera = get_tera("protobuf_file/template/proto_file");
match tera.render("enum.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
},
}
}
}

View File

@ -0,0 +1,5 @@
mod enum_template;
mod struct_template;
pub use enum_template::*;
pub use struct_template::*;

View File

@ -0,0 +1,5 @@
{#- Renders one protobuf message definition: `message Name { field lines }`. -#}
message {{ struct_name }} {
{%- for field in fields %}
{{ field }}
{%- endfor %}
}

View File

@ -0,0 +1,110 @@
use crate::util::get_tera;
use flowy_ast::*;
use phf::phf_map;
use tera::Context;
// Protobuf data type : https://developers.google.com/protocol-buffers/docs/proto3
/// Maps Rust scalar/collection type names to their protobuf counterparts.
/// Types not present here are emitted verbatim.
pub static RUST_TYPE_MAP: phf::Map<&'static str, &'static str> = phf_map! {
  "String" => "string",
  "i64" => "int64",
  "i32" => "int32",
  "u64" => "uint64",
  "u32" => "uint32",
  "Vec" => "repeated",
  "f64" => "double",
  "HashMap" => "map",
};
/// Renders the `struct.tera` template into a protobuf `message` definition.
pub struct StructTemplate {
  // Tera rendering context.
  context: Context,
  // Rendered protobuf field lines, in declaration order.
  fields: Vec<String>,
}
#[allow(dead_code)]
impl StructTemplate {
  /// Creates an empty message-template renderer.
  pub fn new() -> Self {
    StructTemplate {
      context: Context::new(),
      fields: vec![],
    }
  }
  /// Sets the protobuf message name for the rendered `message` block.
  pub fn set_message_struct_name(&mut self, name: &str) {
    self.context.insert("struct_name", name);
  }
  /// Appends one protobuf field line derived from a Rust struct field.
  ///
  /// Mapping rules by bracket category: `Option<T>` becomes a `oneof`,
  /// `HashMap<K, V>` a `map<..>`, `Vec<u8>` `bytes`, other `Vec<T>` a
  /// `repeated` field, and anything else a plain scalar/message field.
  pub fn set_field(&mut self, field: &ASTField) {
    // {{ field_type }} {{ field_name }} = {{index}};
    let name = field.name().unwrap().to_string();
    let index = field.pb_attrs.pb_index().unwrap();
    // Translate the Rust type name to its protobuf equivalent when known.
    let ty: &str = &field.ty_as_str();
    let mut mapped_ty: &str = ty;
    if RUST_TYPE_MAP.contains_key(ty) {
      mapped_ty = RUST_TYPE_MAP[ty];
    }
    if let Some(ref category) = field.bracket_category {
      match category {
        BracketCategory::Opt => match &field.bracket_inner_ty {
          None => {},
          Some(inner_ty) => match inner_ty.to_string().as_str() {
            //TODO: support hashmap or something else wrapped by Option
            // Option<Vec<u8>> is encoded as an optional bytes field.
            "Vec" => {
              self.fields.push(format!(
                "oneof one_of_{} {{ bytes {} = {}; }};",
                name, name, index
              ));
            },
            _ => {
              self.fields.push(format!(
                "oneof one_of_{} {{ {} {} = {}; }};",
                name, mapped_ty, name, index
              ));
            },
          },
        },
        BracketCategory::Map((k, v)) => {
          let key: &str = k;
          let value: &str = v;
          self.fields.push(format!(
            // map<string, string> attrs = 1;
            "map<{}, {}> {} = {};",
            RUST_TYPE_MAP.get(key).unwrap_or(&key),
            RUST_TYPE_MAP.get(value).unwrap_or(&value),
            name,
            index
          ));
        },
        BracketCategory::Vec => {
          let bracket_ty: &str = &field.bracket_ty.as_ref().unwrap().to_string();
          // Vec<u8>
          if mapped_ty == "u8" && bracket_ty == "Vec" {
            self.fields.push(format!("bytes {} = {};", name, index))
          } else {
            self.fields.push(format!(
              "{} {} {} = {};",
              RUST_TYPE_MAP[bracket_ty], mapped_ty, name, index
            ))
          }
        },
        BracketCategory::Other => self
          .fields
          .push(format!("{} {} = {};", mapped_ty, name, index)),
      }
    }
  }
  /// Renders the message template, returning `None` (after logging) on
  /// template failure.
  pub fn render(&mut self) -> Option<String> {
    self.context.insert("fields", &self.fields);
    let tera = get_tera("protobuf_file/template/proto_file");
    match tera.render("struct.tera", &self.context) {
      Ok(r) => Some(r),
      Err(e) => {
        log::error!("{:?}", e);
        None
      },
    }
  }
}

View File

@ -0,0 +1,70 @@
use crate::util::get_tera;
use tera::Context;
/// Renders `event_template.tera` into one TypeScript event wrapper function.
pub struct EventTemplate {
  // Tera rendering context.
  tera_context: Context,
}
/// Per-event data handed to [`EventTemplate::render`].
pub struct EventRenderContext {
  /// Type name of the request payload, if the event takes one.
  pub input_deserializer: Option<String>,
  /// Type name of the response payload, if the event returns one.
  pub output_deserializer: Option<String>,
  /// Type name used to deserialize error payloads.
  pub error_deserializer: String,
  /// Event variant name.
  pub event: String,
  /// Event enum type name.
  pub event_ty: String,
  /// Namespace prefix used in the generated TypeScript (e.g. "pb").
  pub prefix: String,
}
#[allow(dead_code)]
impl EventTemplate {
pub fn new() -> Self {
EventTemplate {
tera_context: Context::new(),
}
}
pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
self.tera_context.insert("index", &index);
let event_func_name = format!("{}{}", ctx.event_ty, ctx.event);
self
.tera_context
.insert("event_func_name", &event_func_name);
self
.tera_context
.insert("event_name", &format!("{}.{}", ctx.prefix, ctx.event_ty));
self.tera_context.insert("event", &ctx.event);
self
.tera_context
.insert("has_input", &ctx.input_deserializer.is_some());
match ctx.input_deserializer {
None => {},
Some(ref input) => self
.tera_context
.insert("input_deserializer", &format!("{}.{}", ctx.prefix, input)),
}
let has_output = ctx.output_deserializer.is_some();
self.tera_context.insert("has_output", &has_output);
match ctx.output_deserializer {
None => self.tera_context.insert("output_deserializer", "void"),
Some(ref output) => self
.tera_context
.insert("output_deserializer", &format!("{}.{}", ctx.prefix, output)),
}
self.tera_context.insert(
"error_deserializer",
&format!("{}.{}", ctx.prefix, ctx.error_deserializer),
);
let tera = get_tera("ts_event");
match tera.render("event_template.tera", &self.tera_context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
},
}
}
}

View File

@ -0,0 +1,36 @@
{#- Renders one async TypeScript wrapper per Flowy event, invoking the Tauri backend. -#}
{%- if has_input %}
export async function {{ event_func_name }}(payload: {{ input_deserializer }}): Promise<Result<{{ output_deserializer }}, {{ error_deserializer }}>> {
{%- else %}
export async function {{ event_func_name }}(): Promise<Result<{{ output_deserializer }}, {{ error_deserializer }}>> {
{%- endif %}
{%- if has_input %}
let args = {
request: {
ty: {{ event_name }}[{{ event_name }}.{{ event }}],
payload: Array.from(payload.serializeBinary()),
},
};
{%- else %}
let args = {
request: {
ty: {{ event_name }}[{{ event_name }}.{{ event }}],
payload: Array.from([]),
},
};
{%- endif %}
let result: { code: number; payload: Uint8Array } = await invoke("invoke_request", args);
if (result.code == 0) {
{%- if has_output %}
let object = {{ output_deserializer }}.deserializeBinary(result.payload);
return Ok(object);
{%- else %}
return Ok.EMPTY;
{%- endif %}
} else {
let error = {{ error_deserializer }}.deserializeBinary(result.payload);
console.log({{ event_func_name }}.name, error);
return Err(error);
}
}

View File

@ -0,0 +1,207 @@
mod event_template;
use crate::ast::EventASTContext;
use crate::flowy_toml::{parse_crate_config_from, CrateConfig};
use crate::ts_event::event_template::{EventRenderContext, EventTemplate};
use crate::util::{is_crate_dir, is_hidden, path_string_with_component, read_file};
use flowy_ast::ASTResult;
use std::collections::HashSet;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
use syn::Item;
use walkdir::WalkDir;
/// Generates the TypeScript event bindings for `crate_name` and writes
/// `event.ts` plus an `index.ts` re-export into the Tauri backend service
/// folder. Panics on any I/O failure.
pub fn gen(crate_name: &str) {
  // Workspace root and output folder, overridable via environment variables.
  let root = std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap_or("../../".to_string());
  let tauri_backend_service_path = std::env::var("TAURI_BACKEND_SERVICE_PATH")
    .unwrap_or("appflowy_tauri/src/services/backend".to_string());
  // Collect every event enum declared in the current crate.
  let crate_path = std::fs::canonicalize(".")
    .unwrap()
    .as_path()
    .display()
    .to_string();
  let event_crates = parse_ts_event_files(vec![crate_path]);
  let event_ast = event_crates
    .iter()
    .flat_map(parse_event_crate)
    .collect::<Vec<_>>();
  // Render one async TS wrapper per event, wrapped in the shared header/footer.
  let event_render_ctx = ast_to_event_render_ctx(event_ast.as_ref());
  let mut render_result = TS_HEADER.to_string();
  for (index, render_ctx) in event_render_ctx.into_iter().enumerate() {
    let mut event_template = EventTemplate::new();
    if let Some(content) = event_template.render(render_ctx, index) {
      render_result.push_str(content.as_ref())
    }
  }
  render_result.push_str(TS_FOOTER);
  // Ensure the events/<crate_name> output folder exists.
  let ts_event_folder: PathBuf = [&root, &tauri_backend_service_path, "events", crate_name]
    .iter()
    .collect();
  if !ts_event_folder.as_path().exists() {
    std::fs::create_dir_all(ts_event_folder.as_path()).unwrap();
  }
  let event_file = "event";
  let event_file_ext = "ts";
  let ts_event_file_path = path_string_with_component(
    &ts_event_folder,
    vec![&format!("{}.{}", event_file, event_file_ext)],
  );
  println!("cargo:rerun-if-changed={}", ts_event_file_path);
  // Overwrite event.ts with the rendered bindings.
  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(&ts_event_file_path)
  {
    Ok(ref mut file) => {
      file.write_all(render_result.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}, {:?}", ts_event_file_path, err);
    },
  }
  // Regenerate index.ts so consumers get both the models and the events.
  let ts_index = path_string_with_component(&ts_event_folder, vec!["index.ts"]);
  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(&ts_index)
  {
    Ok(ref mut file) => {
      let mut export = String::new();
      export.push_str("// Auto-generated, do not edit \n");
      export.push_str(&format!("export * from '../../models/{}';\n", crate_name));
      export.push_str(&format!("export * from './{}';\n", event_file));
      file.write_all(export.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}", err);
    },
  }
}
#[derive(Debug)]
/// One crate whose event files should be scanned for TS event generation.
pub struct TsEventCrate {
  // Path to the crate root.
  crate_path: PathBuf,
  // Event source files, relative to `crate_path`.
  event_files: Vec<String>,
}
impl TsEventCrate {
pub fn from_config(config: &CrateConfig) -> Self {
TsEventCrate {
crate_path: config.crate_path.clone(),
event_files: config.flowy_config.event_files.clone(),
}
}
}
/// Walks each crate path and collects every crate with a parsable event
/// configuration.
pub fn parse_ts_event_files(crate_paths: Vec<String>) -> Vec<TsEventCrate> {
  crate_paths
    .iter()
    .flat_map(|path| {
      WalkDir::new(path)
        .into_iter()
        // Skip dot-directories entirely.
        .filter_entry(|e| !is_hidden(e))
        .filter_map(|e| e.ok())
        .filter(is_crate_dir)
        .flat_map(|e| parse_crate_config_from(&e))
        .map(|crate_config| TsEventCrate::from_config(&crate_config))
    })
    .collect()
}
/// Parses every configured event file of `event_crate` and returns the AST
/// context of each event enum variant that is not marked as ignored.
pub fn parse_event_crate(event_crate: &TsEventCrate) -> Vec<EventASTContext> {
  let mut contexts = Vec::new();
  for event_file in event_crate.event_files.iter() {
    let file_path =
      path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);
    let file_content = read_file(file_path.as_ref()).unwrap();
    let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
    for item in ast.items.iter() {
      // Only enum items can declare Flowy events.
      if let Item::Enum(item_enum) = item {
        let ast_result = ASTResult::new();
        let attrs = flowy_ast::enum_from_ast(
          &ast_result,
          &item_enum.ident,
          &item_enum.variants,
          &item_enum.attrs,
        );
        ast_result.check().unwrap();
        contexts.extend(
          attrs
            .iter()
            .filter(|attr| !attr.attrs.event_attrs.ignore)
            .map(|variant| EventASTContext::from(&variant.attrs)),
        );
      }
    }
  }
  contexts
}
/// Converts parsed event ASTs into the render contexts consumed by
/// [`EventTemplate`].
///
/// Note: the previous version also built an `import_objects` `HashSet` of
/// input/output type names that was never read — that dead computation has
/// been removed; the behavior of the returned contexts is unchanged.
pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContext> {
  ast
    .iter()
    .map(|event_ast| {
      // Input/output payload type names, when the event declares them.
      let input_deserializer = event_ast
        .event_input
        .as_ref()
        .map(|event_input| event_input.get_ident().unwrap().to_string());
      let output_deserializer = event_ast
        .event_output
        .as_ref()
        .map(|event_output| event_output.get_ident().unwrap().to_string());
      EventRenderContext {
        input_deserializer,
        output_deserializer,
        error_deserializer: event_ast.event_error.to_string(),
        event: event_ast.event.to_string(),
        event_ty: event_ast.event_ty.to_string(),
        // Generated TS accesses every model type through the `pb` namespace.
        prefix: "pb".to_string(),
      }
    })
    .collect::<Vec<EventRenderContext>>()
}
// Header prepended to every generated TypeScript event file.
const TS_HEADER: &str = r#"
/// Auto generate. Do not edit
import { Ok, Err, Result } from "ts-results";
import { invoke } from "@tauri-apps/api/tauri";
import * as pb from "../..";
"#;
// Footer appended after all generated event functions.
const TS_FOOTER: &str = r#"
"#;

View File

@ -0,0 +1,190 @@
use console::Style;
use similar::{ChangeTag, TextDiff};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::{
fs::{File, OpenOptions},
io::{Read, Write},
};
use tera::Tera;
use walkdir::WalkDir;
/// Reads the whole file at `path` into a `String`.
///
/// Panics when the file cannot be opened. A read failure is logged and an
/// empty string is returned (callers unwrap the result), preserving the
/// original best-effort behavior.
pub fn read_file(path: &str) -> Option<String> {
  let mut file = File::open(path).unwrap_or_else(|_| panic!("Unable to open file at {}", path));
  let mut content = String::new();
  if let Err(e) = file.read_to_string(&mut content) {
    log::error!("{}, with error: {:?}", path, e);
    return Some("".to_string());
  }
  Some(content)
}
/// Writes `content` to `output_file`. When the file already exists and its
/// content differs, a colored diff is printed before overwriting; identical
/// content is left untouched.
pub fn save_content_to_file_with_diff_prompt(content: &str, output_file: &str) {
  if !Path::new(output_file).exists() {
    // First write: just create the file.
    match OpenOptions::new()
      .create(true)
      .write(true)
      .open(output_file)
    {
      Ok(ref mut file) => file.write_all(content.as_bytes()).unwrap(),
      Err(err) => panic!("Open or create to {} fail: {}", output_file, err),
    }
    return;
  }
  let old_content = read_file(output_file).unwrap();
  let new_content = content.to_owned();
  if new_content == old_content {
    // Nothing changed; skip the write entirely.
    return;
  }
  print_diff(old_content, new_content.clone());
  match OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(output_file)
  {
    Ok(ref mut file) => {
      file.write_all(new_content.as_bytes()).unwrap();
    },
    Err(err) => {
      panic!("Failed to open log file: {}", err);
    },
  }
}
/// Prints a colored line diff: deletions with a red `-`, insertions with a
/// green `+`; unchanged lines are skipped. A separator line is printed after
/// each diff op, as before.
pub fn print_diff(old_content: String, new_content: String) {
  let diff = TextDiff::from_lines(&old_content, &new_content);
  for op in diff.ops() {
    for change in diff.iter_changes(op) {
      // Single match instead of the previous duplicated `change.tag()`
      // matches: pick sign/color once, or skip unchanged lines outright.
      let (sign, style) = match change.tag() {
        ChangeTag::Delete => ("-", Style::new().red()),
        ChangeTag::Insert => ("+", Style::new().green()),
        ChangeTag::Equal => continue,
      };
      print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
    }
    println!("---------------------------------------------------");
  }
}
/// True when the directory entry is a crate manifest (file stem `Cargo`).
///
/// Uses `file_stem` so `Cargo.toml` matches; entries without a file stem
/// (e.g. a bare `.` walk root) are rejected instead of panicking on the
/// previous `unwrap()`.
#[allow(dead_code)]
pub fn is_crate_dir(e: &walkdir::DirEntry) -> bool {
  e.path()
    .file_stem()
    .and_then(|stem| stem.to_str())
    .map_or(false, |stem| stem == "Cargo")
}
/// True when the entry has a `.proto` file extension.
///
/// Collapses the previous None-check plus `unwrap()` chain; a non-UTF-8
/// extension now yields `false` instead of panicking.
#[allow(dead_code)]
pub fn is_proto_file(e: &walkdir::DirEntry) -> bool {
  e.path().extension().and_then(|ext| ext.to_str()) == Some("proto")
}
/// True when the entry's file name starts with a dot (hidden on Unix).
pub fn is_hidden(entry: &walkdir::DirEntry) -> bool {
  matches!(entry.file_name().to_str(), Some(name) if name.starts_with('.'))
}
/// Ensures `dir` exists, creating all missing parent directories.
///
/// `create_dir_all` already succeeds when the directory exists, so the
/// previous `exists()` pre-check (a TOCTOU race besides being redundant)
/// is dropped. Panics on I/O failure, as before.
pub fn create_dir_if_not_exist(dir: &Path) {
  std::fs::create_dir_all(dir).unwrap();
}
/// Joins `components` onto `path` and returns the result as a `String`.
///
/// Panics when the resulting path is not valid UTF-8.
pub fn path_string_with_component(path: &Path, components: Vec<&str>) -> String {
  let joined = path_buf_with_component(path, components);
  joined.to_str().unwrap().to_owned()
}
/// Joins each of `components` onto `path`, returning the new `PathBuf`.
#[allow(dead_code)]
pub fn path_buf_with_component(path: &Path, components: Vec<&str>) -> PathBuf {
  // `PathBuf: Extend<P: AsRef<Path>>` pushes each component in order,
  // exactly like the explicit loop did.
  let mut joined = path.to_path_buf();
  joined.extend(components);
  joined
}
/// Walks `dir` and, for every entry accepted by `filter`, invokes
/// `path_and_name(full_path, file_stem)`.
///
/// Panics when a path or file stem is missing or not valid UTF-8.
#[allow(dead_code)]
pub fn walk_dir<P: AsRef<Path>, F1, F2>(dir: P, filter: F2, mut path_and_name: F1)
where
  F1: FnMut(String, String),
  F2: Fn(&walkdir::DirEntry) -> bool,
{
  WalkDir::new(dir)
    .into_iter()
    .filter_map(|e| e.ok())
    .filter(|e| filter(e))
    .for_each(|e| {
      let path = e.path().to_str().unwrap().to_string();
      let name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
      path_and_name(path, name);
    });
}
/// Returns `path` relative to `base` as an owned `String`.
///
/// Panics when `path` is not located under `base` or is not valid UTF-8.
#[allow(dead_code)]
pub fn suffix_relative_to_path(path: &str, base: &str) -> String {
  Path::new(path)
    .strip_prefix(Path::new(base))
    .unwrap()
    .to_str()
    .unwrap()
    .to_owned()
}
/// Builds a `Tera` instance loading every `*.tera` template under
/// `<CARGO_MANIFEST_DIR>/src/<directory>`.
///
/// Panics when the directory cannot be canonicalized; exits the process when
/// the templates fail to parse.
pub fn get_tera(directory: &str) -> Tera {
  let mut root = format!("{}/src/", env!("CARGO_MANIFEST_DIR"));
  root.push_str(directory);
  let root_absolute_path = match std::fs::canonicalize(&root) {
    Ok(p) => p.as_path().display().to_string(),
    Err(e) => {
      panic!("❌ Canonicalize file path {} failed {:?}", root, e);
    },
  };
  let mut template_path = format!("{}/**/*.tera", root_absolute_path);
  if cfg!(windows) {
    // remove "\\?\" prefix on windows
    // canonicalize() yields a verbatim path (`\\?\C:\...`) on Windows, which
    // the glob matcher does not understand — strip the 4-byte prefix.
    template_path = format!("{}/**/*.tera", &root_absolute_path[4..]);
  }
  match Tera::new(template_path.as_ref()) {
    Ok(t) => t,
    Err(e) => {
      log::error!("Parsing error(s): {}", e);
      ::std::process::exit(1);
    },
  }
}
pub fn cache_dir() -> PathBuf {
let mut path_buf = PathBuf::from_str(env!("CARGO_MANIFEST_DIR")).unwrap();
path_buf.push(".cache");
path_buf
}

View File

@ -0,0 +1,2 @@
/target
Cargo.lock

View File

@ -0,0 +1,29 @@
[package]
name = "flowy-derive"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
proc-macro = true
name = "flowy_derive"
[[test]]
name = "tests"
path = "tests/progress.rs"
[dependencies]
syn = { version = "1.0.109", features = ["extra-traits", "visit"] }
quote = "1.0"
proc-macro2 = "1.0"
flowy-ast.workspace = true
lazy_static = {version = "1.4.0"}
dashmap = "5"
flowy-codegen.workspace = true
serde_json.workspace = true
walkdir = "2.3.2"
[dev-dependencies]
tokio = { workspace = true, features = ["full"] }
trybuild = "1.0.77"

View File

@ -0,0 +1,41 @@
use proc_macro2::TokenStream;
// #[proc_macro_derive(DartEvent, attributes(event_ty))]
/// Placeholder expansion for the Dart event derive: currently emits no Rust
/// tokens (a commented-out reference implementation is kept alongside).
pub fn expand_enum_derive(_input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
  Ok(TokenStream::default())
}
// use flowy_ast::{ASTContainer, Ctxt};
// use proc_macro2::TokenStream;
//
// // #[proc_macro_derive(DartEvent, attributes(event_ty))]
// pub fn expand_enum_derive(input: &syn::DeriveInput) -> Result<TokenStream,
// Vec<syn::Error>> { let ctxt = Ctxt::new();
// let cont = match ASTContainer::from_ast(&ctxt, input) {
// Some(cont) => cont,
// None => return Err(ctxt.check().unwrap_err()),
// };
//
// let enum_ident = &cont.ident;
// let pb_enum = cont.attrs.pb_enum_type().unwrap();
//
// let build_display_pb_enum = cont.data.all_idents().map(|i| {
// let a = format_ident!("{}", i.to_string());
// let token_stream: TokenStream = quote! {
// #enum_ident::#i => f.write_str(&#a)?,
// };
// token_stream
// });
//
// ctxt.check()?;
//
// Ok(quote! {
// impl std::fmt::Display for #enum_ident {
// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result
// { match self {
// #(#build_display_pb_enum)*
// }
// }
// }
// })
// }

View File

@ -0,0 +1,51 @@
// https://docs.rs/syn/1.0.48/syn/struct.DeriveInput.html
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};
#[macro_use]
extern crate quote;
mod dart_event;
mod node;
mod proto_buf;
// Inspired by https://serde.rs/attributes.html
#[proc_macro_derive(ProtoBuf, attributes(pb))]
pub fn derive_proto_buf(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
proto_buf::expand_derive(&input)
.unwrap_or_else(to_compile_errors)
.into()
}
#[proc_macro_derive(ProtoBuf_Enum, attributes(pb))]
pub fn derive_proto_buf_enum(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
proto_buf::expand_enum_derive(&input)
.unwrap_or_else(to_compile_errors)
.into()
}
#[proc_macro_derive(Flowy_Event, attributes(event, event_err))]
pub fn derive_dart_event(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
dart_event::expand_enum_derive(&input)
.unwrap_or_else(to_compile_errors)
.into()
}
#[proc_macro_derive(Node, attributes(node, nodes, node_type))]
pub fn derive_node(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
node::expand_derive(&input)
.unwrap_or_else(to_compile_errors)
.into()
}
fn to_compile_errors(errors: Vec<syn::Error>) -> proc_macro2::TokenStream {
let compile_errors = errors.iter().map(syn::Error::to_compile_error);
quote!(#(#compile_errors)*)
}

View File

@ -0,0 +1,234 @@
use flowy_ast::{ASTContainer, ASTField, ASTResult};
use proc_macro2::TokenStream;
/// Derive entry point for `#[derive(Node)]`: emits the path helper,
/// `ToNodeData`, per-field getters/setters, and child-mutation helpers.
pub fn expand_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
  let ast_result = ASTResult::new();
  let container = match ASTContainer::from_ast(&ast_result, input) {
    Some(container) => container,
    None => return Err(ast_result.check().unwrap_err()),
  };
  let mut output = TokenStream::default();
  output.extend(make_helper_funcs_token_stream(&container));
  output.extend(make_to_node_data_token_stream(&container));
  // `make_get_set_value_token_steam` returns `Option`; an absent stream
  // contributes nothing.
  output.extend(make_get_set_value_token_steam(&container).unwrap_or_default());
  output.extend(make_alter_children_token_stream(&ast_result, &container));
  ast_result.check()?;
  Ok(output)
}
pub fn make_helper_funcs_token_stream(ast: &ASTContainer) -> TokenStream {
let mut token_streams = TokenStream::default();
let struct_ident = &ast.ident;
token_streams.extend(quote! {
impl #struct_ident {
pub fn get_path(&self) -> Option<Path> {
let node_id = &self.node_id?;
Some(self.tree.read().path_from_node_id(node_id.clone()))
}
}
});
token_streams
}
/// For the (at most one) field marked with `has_child`, emits
/// `get_<child>`, `get_mut_<child>`, `add_<child>` and `remove_<child>`
/// helpers that keep the children vector and the node tree in sync.
///
/// Reports an error (and emits nothing) when more than one field is marked
/// as the children property.
pub fn make_alter_children_token_stream(ast_result: &ASTResult, ast: &ASTContainer) -> TokenStream {
  let mut token_streams = TokenStream::default();
  let children_fields = ast
    .data
    .all_fields()
    .filter(|field| field.node_attrs.has_child)
    .collect::<Vec<&ASTField>>();
  if !children_fields.is_empty() {
    let struct_ident = &ast.ident;
    if children_fields.len() > 1 {
      ast_result.error_spanned_by(struct_ident, "Only one children property");
      return token_streams;
    }
    let children_field = children_fields.first().unwrap();
    let field_name = children_field.name().unwrap();
    // The accessor names come from the `child_name` attribute, not the field.
    let child_name = children_field.node_attrs.child_name.as_ref().unwrap();
    let get_func_name = format_ident!("get_{}", child_name.value());
    let get_mut_func_name = format_ident!("get_mut_{}", child_name.value());
    let add_func_name = format_ident!("add_{}", child_name.value());
    let remove_func_name = format_ident!("remove_{}", child_name.value());
    // Element type, i.e. the `T` of the `Vec<T>` children field.
    let ty = children_field.bracket_inner_ty.as_ref().unwrap().clone();
    token_streams.extend(quote! {
      impl #struct_ident {
        pub fn #get_func_name<T: AsRef<str>>(&self, id: T) -> Option<&#ty> {
          let id = id.as_ref();
          self.#field_name.iter().find(|element| element.id == id)
        }
        pub fn #get_mut_func_name<T: AsRef<str>>(&mut self, id: T) -> Option<&mut #ty> {
          let id = id.as_ref();
          self.#field_name.iter_mut().find(|element| element.id == id)
        }
        pub fn #remove_func_name<T: AsRef<str>>(&mut self, id: T) {
          let id = id.as_ref();
          if let Some(index) = self.#field_name.iter().position(|element| element.id == id && element.node_id.is_some()) {
            let element = self.#field_name.remove(index);
            let element_path = element.get_path().unwrap();
            let mut write_guard = self.tree.write();
            let mut nodes = vec![];
            if let Some(node_data) = element.node_id.and_then(|node_id| write_guard.get_node_data(node_id.clone())) {
              nodes.push(node_data);
            }
            let _ = write_guard.apply_op(NodeOperation::Delete {
              path: element_path,
              nodes,
            });
          }
        }
        pub fn #add_func_name(&mut self, mut value: #ty) -> Result<(), String> {
          if self.node_id.is_none() {
            return Err("The node id is empty".to_owned());
          }
          let mut transaction = Transaction::new();
          let parent_path = self.get_path().unwrap();
          let path = parent_path.clone_with(self.#field_name.len());
          let node_data = value.to_node_data();
          transaction.push_operation(NodeOperation::Insert {
            path: path.clone(),
            nodes: vec![node_data],
          });
          let _ = self.tree.write().apply_transaction(transaction);
          let child_node_id = self.tree.read().node_id_at_path(path).unwrap();
          value.node_id = Some(child_node_id);
          self.#field_name.push(value);
          Ok(())
        }
      }
    });
  }
  token_streams
}
/// Emits the `ToNodeData` impl: non-child fields become node attributes
/// (honoring `rename`), and the children field (if any) is converted
/// recursively via `to_node_data()`.
///
/// Panics at macro-expansion time when the struct lacks a
/// `#[node_type = "…"]` attribute.
pub fn make_to_node_data_token_stream(ast: &ASTContainer) -> TokenStream {
  let struct_ident = &ast.ident;
  let mut token_streams = TokenStream::default();
  let node_type = ast
    .node_type
    .as_ref()
    .expect("Define the type of the node by using #[node_type = \"xx\" in the struct");
  let set_key_values = ast
    .data
    .all_fields()
    .filter(|field| !field.node_attrs.has_child)
    .flat_map(|field| {
      // Attribute key may be renamed, but the value is always read from the
      // original Rust field.
      let mut field_name = field
        .name()
        .expect("the name of the field should not be empty");
      let original_field_name = field
        .name()
        .expect("the name of the field should not be empty");
      if let Some(rename) = &field.node_attrs.rename {
        field_name = format_ident!("{}", rename.value());
      }
      let field_name_str = field_name.to_string();
      quote! {
        .insert_attribute(#field_name_str, self.#original_field_name.clone())
      }
    });
  let children_fields = ast
    .data
    .all_fields()
    .filter(|field| field.node_attrs.has_child)
    .collect::<Vec<&ASTField>>();
  let childrens_token_streams = match children_fields.is_empty() {
    true => {
      quote! {
        let children = vec![];
      }
    },
    false => {
      let children_field = children_fields.first().unwrap();
      let original_field_name = children_field
        .name()
        .expect("the name of the field should not be empty");
      quote! {
        let children = self.#original_field_name.iter().map(|value| value.to_node_data()).collect::<Vec<NodeData>>();
      }
    },
  };
  token_streams.extend(quote! {
    impl ToNodeData for #struct_ident {
      fn to_node_data(&self) -> NodeData {
        #childrens_token_streams
        let builder = NodeDataBuilder::new(#node_type)
        #(#set_key_values)*
        .extend_node_data(children);
        builder.build()
      }
    }
  });
  token_streams
}
/// Emits `get_<field>` / `set_<field>` accessors for every non-child field
/// that declares `get_node_value_with` / `set_node_value_with` attributes.
/// Accessors read/write the value through the node tree, keyed by the
/// (possibly renamed) attribute name. Always returns `Some`.
pub fn make_get_set_value_token_steam(ast: &ASTContainer) -> Option<TokenStream> {
  let struct_ident = &ast.ident;
  let mut token_streams = TokenStream::default();
  // Ident of the tree field every generated accessor goes through.
  let tree = format_ident!("tree");
  for field in ast.data.all_fields() {
    if field.node_attrs.has_child {
      continue;
    }
    let mut field_name = field
      .name()
      .expect("the name of the field should not be empty");
    if let Some(rename) = &field.node_attrs.rename {
      field_name = format_ident!("{}", rename.value());
    }
    let field_name_str = field_name.to_string();
    let get_func_name = format_ident!("get_{}", field_name);
    let set_func_name = format_ident!("set_{}", field_name);
    let get_value_return_ty = field.ty;
    let set_value_input_ty = field.ty;
    if let Some(get_value_with_fn) = &field.node_attrs.get_node_value_with {
      token_streams.extend(quote! {
        impl #struct_ident {
          pub fn #get_func_name(&self) -> Option<#get_value_return_ty> {
            let node_id = self.node_id.as_ref()?;
            #get_value_with_fn(self.#tree.clone(), node_id, #field_name_str)
          }
        }
      });
    }
    if let Some(set_value_with_fn) = &field.node_attrs.set_node_value_with {
      token_streams.extend(quote! {
        impl #struct_ident {
          pub fn #set_func_name(&self, value: #set_value_input_ty) {
            if let Some(node_id) = self.node_id.as_ref() {
              let _ = #set_value_with_fn(self.#tree.clone(), node_id, #field_name_str, value);
            }
          }
        }
      });
    }
  }
  Some(token_streams)
}

View File

@ -0,0 +1,270 @@
use proc_macro2::{Span, TokenStream};
use flowy_ast::*;
use crate::proto_buf::util::*;
/// Emits the deserialization half of the ProtoBuf derive:
/// `TryFrom<bytes::Bytes>` / `TryFrom<&[u8]>` impls plus a
/// `From<crate::protobuf::PbTy>` that moves each field out of the parsed
/// protobuf message. Returns `None` when the container has no
/// `#[pb(struct = …)]` type.
pub fn make_de_token_steam(ast_result: &ASTResult, ast: &ASTContainer) -> Option<TokenStream> {
  let pb_ty = ast.pb_attrs.pb_struct_type()?;
  let struct_ident = &ast.ident;
  // One take/convert statement per field, honoring skip/custom/one-of attrs.
  let build_take_fields = ast
    .data
    .all_fields()
    .filter(|f| !f.pb_attrs.skip_pb_deserializing())
    .flat_map(|field| {
      if let Some(func) = field.pb_attrs.deserialize_pb_with() {
        let member = &field.member;
        Some(quote! { o.#member=#struct_ident::#func(pb); })
      } else if field.pb_attrs.is_one_of() {
        token_stream_for_one_of(ast_result, field)
      } else {
        token_stream_for_field(ast_result, &field.member, field.ty, false)
      }
    });
  let de_token_stream: TokenStream = quote! {
    impl std::convert::TryFrom<bytes::Bytes> for #struct_ident {
      type Error = ::protobuf::ProtobufError;
      fn try_from(bytes: bytes::Bytes) -> Result<Self, Self::Error> {
        Self::try_from(&bytes)
      }
    }
    impl std::convert::TryFrom<&bytes::Bytes> for #struct_ident {
      type Error = ::protobuf::ProtobufError;
      fn try_from(bytes: &bytes::Bytes) -> Result<Self, Self::Error> {
        let pb: crate::protobuf::#pb_ty = ::protobuf::Message::parse_from_bytes(bytes)?;
        Ok(#struct_ident::from(pb))
      }
    }
    impl std::convert::TryFrom<&[u8]> for #struct_ident {
      type Error = ::protobuf::ProtobufError;
      fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
        let pb: crate::protobuf::#pb_ty = ::protobuf::Message::parse_from_bytes(bytes)?;
        Ok(#struct_ident::from(pb))
      }
    }
    impl std::convert::From<crate::protobuf::#pb_ty> for #struct_ident {
      fn from(mut pb: crate::protobuf::#pb_ty) -> Self {
        let mut o = Self::default();
        #(#build_take_fields)*
        o
      }
    }
  };
  Some(de_token_stream)
  // None
}
/// Emits the deserialization statement for an `Option<T>` field backed by a
/// protobuf `oneof`: guarded by `has_<field>()`, the value is fetched with
/// `get_*` (enums, primitives) or moved out with `take_*` (strings, arrays,
/// messages), converting with `T::from` where needed.
///
/// Panics at macro-expansion time when the field's type cannot be parsed.
fn token_stream_for_one_of(ast_result: &ASTResult, field: &ASTField) -> Option<TokenStream> {
  let member = &field.member;
  let ident = get_member_ident(ast_result, member)?;
  let ty_info = match parse_ty(ast_result, field.ty) {
    Ok(ty_info) => ty_info,
    Err(e) => {
      eprintln!(
        "token_stream_for_one_of failed: {:?} with error: {}",
        member, e
      );
      panic!();
    },
  }?;
  // The `T` inside the Option<T>.
  let bracketed_ty_info = ty_info.bracket_ty_info.as_ref().as_ref();
  let has_func = format_ident!("has_{}", ident.to_string());
  match ident_category(bracketed_ty_info.unwrap().ident) {
    TypeCategory::Enum => {
      let get_func = format_ident!("get_{}", ident.to_string());
      let ty = bracketed_ty_info.unwrap().ty;
      Some(quote! {
        if pb.#has_func() {
          let enum_de_from_pb = #ty::from(&pb.#get_func());
          o.#member = Some(enum_de_from_pb);
        }
      })
    },
    TypeCategory::Primitive => {
      let get_func = format_ident!("get_{}", ident.to_string());
      Some(quote! {
        if pb.#has_func() {
          o.#member=Some(pb.#get_func());
        }
      })
    },
    TypeCategory::Str => {
      let take_func = format_ident!("take_{}", ident.to_string());
      Some(quote! {
        if pb.#has_func() {
          o.#member=Some(pb.#take_func());
        }
      })
    },
    TypeCategory::Array => {
      let take_func = format_ident!("take_{}", ident.to_string());
      Some(quote! {
        if pb.#has_func() {
          o.#member=Some(pb.#take_func());
        }
      })
    },
    _ => {
      // Message types: take ownership, then convert with `T::from`.
      let take_func = format_ident!("take_{}", ident.to_string());
      let ty = bracketed_ty_info.unwrap().ty;
      Some(quote! {
        if pb.#has_func() {
          let val = #ty::from(pb.#take_func());
          o.#member=Some(val);
        }
      })
    },
  }
}
/// Generates the deserialization statement(s) for a single struct field,
/// dispatching on the field's [`TypeCategory`].
///
/// `is_option` is true when this call was reached through an `Option<T>`
/// wrapper (the `TypeCategory::Opt` arm recurses with the inner type).
///
/// Returns `None` for tuple members; panics when the type cannot be parsed.
fn token_stream_for_field(
  ast_result: &ASTResult,
  member: &syn::Member,
  ty: &syn::Type,
  is_option: bool,
) -> Option<TokenStream> {
  let ident = get_member_ident(ast_result, member)?;
  let ty_info = match parse_ty(ast_result, ty) {
    Ok(ty_info) => ty_info,
    Err(e) => {
      eprintln!("token_stream_for_field: {:?} with error: {}", member, e);
      panic!()
    },
  }?;
  match ident_category(ty_info.ident) {
    TypeCategory::Array => {
      assert_bracket_ty_is_some(ast_result, &ty_info);
      token_stream_for_vec(ast_result, member, &ty_info.bracket_ty_info.unwrap())
    },
    TypeCategory::Map => {
      assert_bracket_ty_is_some(ast_result, &ty_info);
      token_stream_for_map(ast_result, member, &ty_info.bracket_ty_info.unwrap())
    },
    TypeCategory::Protobuf => {
      // if the type wrapped by SingularPtrField, should call take first
      let take = syn::Ident::new("take", Span::call_site());
      // inner_type_ty would be the type of the field. (e.g value of AnyData)
      let ty = ty_info.ty;
      Some(quote! {
        let some_value = pb.#member.#take();
        if some_value.is_some() {
          let struct_de_from_pb = #ty::from(some_value.unwrap());
          o.#member = struct_de_from_pb;
        }
      })
    },
    TypeCategory::Enum => {
      // Enums convert by reference (`From<&pb_enum>` is generated for them).
      let ty = ty_info.ty;
      Some(quote! {
        let enum_de_from_pb = #ty::from(&pb.#member);
        o.#member = enum_de_from_pb;
      })
    },
    TypeCategory::Str => {
      let take_ident = syn::Ident::new(&format!("take_{}", ident), Span::call_site());
      if is_option {
        // Protobuf has no optional string: an empty pb string maps to `None`.
        Some(quote! {
          if pb.#member.is_empty() {
            o.#member = None;
          } else {
            o.#member = Some(pb.#take_ident());
          }
        })
      } else {
        Some(quote! {
          o.#member = pb.#take_ident();
        })
      }
    },
    // Unwrap the `Option` and generate code for the inner type.
    TypeCategory::Opt => token_stream_for_field(
      ast_result,
      member,
      ty_info.bracket_ty_info.unwrap().ty,
      true,
    ),
    TypeCategory::Primitive | TypeCategory::Bytes => {
      // eprintln!("😄 #{:?}", &field.name().unwrap());
      if is_option {
        Some(quote! { o.#member = Some(pb.#member.clone()); })
      } else {
        Some(quote! { o.#member = pb.#member.clone(); })
      }
    },
  }
}
/// Generates deserialization code for a `Vec<T>` field, where
/// `bracketed_type` describes the element type `T`.
///
/// * `Protobuf` elements — `take_xx()` the repeated field and map each item
///   through `From`.
/// * `Bytes` (`Vec<u8>`) — plain clone of the pb bytes field.
/// * `Str` — `take_xx().into_vec()` on the repeated string field.
/// * anything else — `take_xx()` as-is.
fn token_stream_for_vec(
  ctxt: &ASTResult,
  member: &syn::Member,
  bracketed_type: &TyInfo,
) -> Option<TokenStream> {
  let ident = get_member_ident(ctxt, member)?;
  match ident_category(bracketed_type.ident) {
    TypeCategory::Protobuf => {
      let ty = bracketed_type.ty;
      // Deserialize from pb struct of type vec, should call take_xx(), get the
      // repeated_field and then calling the into_iter.
      let take_ident = format_ident!("take_{}", ident.to_string());
      Some(quote! {
        o.#member = pb.#take_ident()
          .into_iter()
          .map(|m| #ty::from(m))
          .collect();
      })
    },
    TypeCategory::Bytes => {
      // Vec<u8>
      Some(quote! {
        o.#member = pb.#member.clone();
      })
    },
    TypeCategory::Str => {
      let take_ident = format_ident!("take_{}", ident.to_string());
      Some(quote! {
        o.#member = pb.#take_ident().into_vec();
      })
    },
    _ => {
      let take_ident = format_ident!("take_{}", ident.to_string());
      Some(quote! {
        o.#member = pb.#take_ident();
      })
    },
  }
}
/// Generates deserialization code for a `HashMap<String, V>` field (the
/// generated maps are always keyed by `String`).
///
/// `take_xx()` moves the map out of the pb struct; protobuf message values
/// are converted with `From`, all other value types are inserted unchanged.
fn token_stream_for_map(
  ast_result: &ASTResult,
  member: &syn::Member,
  ty_info: &TyInfo,
) -> Option<TokenStream> {
  let ident = get_member_ident(ast_result, member)?;
  let take_ident = format_ident!("take_{}", ident.to_string());
  let ty = ty_info.ty;
  match ident_category(ty_info.ident) {
    TypeCategory::Protobuf => Some(quote! {
      let mut m: std::collections::HashMap<String, #ty> = std::collections::HashMap::new();
      pb.#take_ident().into_iter().for_each(|(k,v)| {
        m.insert(k.clone(), #ty::from(v));
      });
      o.#member = m;
    }),
    _ => Some(quote! {
      let mut m: std::collections::HashMap<String, #ty> = std::collections::HashMap::new();
      pb.#take_ident().into_iter().for_each(|(k,mut v)| {
        m.insert(k.clone(), v);
      });
      o.#member = m;
    }),
  }
}

View File

@ -0,0 +1,39 @@
use flowy_ast::*;
use proc_macro2::TokenStream;
#[allow(dead_code)]
/// Generates the bidirectional `From` conversions between a Rust enum and its
/// protobuf counterpart: `From<&crate::protobuf::#pb_enum>` for the Rust type
/// and `From<#enum_ident>` for the pb type.
///
/// Returns `None` when the container carries no `pb_enum_type` attribute.
pub fn make_enum_token_stream(_ast_result: &ASTResult, cont: &ASTContainer) -> Option<TokenStream> {
  let enum_ident = &cont.ident;
  let pb_enum = cont.pb_attrs.pb_enum_type()?;

  // One match arm per variant, in each direction.
  let to_pb_arms: Vec<TokenStream> = cont
    .data
    .all_idents()
    .map(|variant| {
      quote! {
        #enum_ident::#variant => crate::protobuf::#pb_enum::#variant,
      }
    })
    .collect();

  let from_pb_arms: Vec<TokenStream> = cont
    .data
    .all_idents()
    .map(|variant| {
      quote! {
        crate::protobuf::#pb_enum::#variant => #enum_ident::#variant,
      }
    })
    .collect();

  Some(quote! {
    impl std::convert::From<&crate::protobuf::#pb_enum> for #enum_ident {
      fn from(pb:&crate::protobuf::#pb_enum) -> Self {
        match pb {
          #(#from_pb_arms)*
        }
      }
    }

    impl std::convert::From<#enum_ident> for crate::protobuf::#pb_enum{
      fn from(o: #enum_ident) -> crate::protobuf::#pb_enum {
        match o {
          #(#to_pb_arms)*
        }
      }
    }
  })
}

View File

@ -0,0 +1,76 @@
mod deserialize;
mod enum_serde;
mod serialize;
mod util;
use crate::proto_buf::{
deserialize::make_de_token_steam, enum_serde::make_enum_token_stream,
serialize::make_se_token_stream,
};
use flowy_ast::*;
use proc_macro2::TokenStream;
use std::default::Default;
/// Entry point for the struct derive: emits the deserialization impls
/// (`TryFrom<bytes>`, `From<pb>`) followed by the serialization impls
/// (`TryInto<bytes>`, `From<Self> for pb`).
///
/// Any errors accumulated on the `ASTResult` abort the expansion.
pub fn expand_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
  let ast_result = ASTResult::new();
  let container = match ASTContainer::from_ast(&ast_result, input) {
    Some(container) => container,
    // Container parsing failed: surface the collected errors.
    None => return Err(ast_result.check().unwrap_err()),
  };

  let mut output = TokenStream::default();
  output.extend(make_de_token_steam(&ast_result, &container).unwrap_or_default());
  output.extend(make_se_token_stream(&ast_result, &container).unwrap_or_default());

  ast_result.check()?;
  Ok(output)
}
/// Entry point for the enum derive: emits the bidirectional `From`
/// conversions between the Rust enum and its protobuf counterpart.
///
/// Any errors accumulated on the `ASTResult` abort the expansion.
pub fn expand_enum_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
  let ast_result = ASTResult::new();
  let container = match ASTContainer::from_ast(&ast_result, input) {
    Some(container) => container,
    // Container parsing failed: surface the collected errors.
    None => return Err(ast_result.check().unwrap_err()),
  };

  let mut output = TokenStream::default();
  output.extend(make_enum_token_stream(&ast_result, &container).unwrap_or_default());

  ast_result.check()?;
  Ok(output)
}
// #[macro_use]
// macro_rules! impl_try_for_primitive_type {
// ($target:ident) => {
// impl std::convert::TryFrom<&$target> for $target {
// type Error = String;
// fn try_from(val: &$target) -> Result<Self, Self::Error> {
// Ok(val.clone()) } }
//
// impl std::convert::TryInto<$target> for $target {
// type Error = String;
//
// fn try_into(self) -> Result<Self, Self::Error> { Ok(self) }
// }
// };
// }
//
// impl_try_for_primitive_type!(String);
// impl_try_for_primitive_type!(i64);
// impl_try_for_primitive_type!(i32);
// impl_try_for_primitive_type!(i16);
// impl_try_for_primitive_type!(u64);
// impl_try_for_primitive_type!(u32);
// impl_try_for_primitive_type!(u16);
// impl_try_for_primitive_type!(bool);
// impl_try_for_primitive_type!(f64);
// impl_try_for_primitive_type!(f32);

View File

@ -0,0 +1,231 @@
#![allow(clippy::while_let_on_iterator)]
use proc_macro2::TokenStream;
use flowy_ast::*;
use crate::proto_buf::util::{get_member_ident, ident_category, TypeCategory};
/// Generates the serialization half of the protobuf bridge for a struct:
/// `TryInto<bytes::Bytes>`, `TryInto<Vec<u8>>`, and
/// `From<#struct_ident> for crate::protobuf::#pb_ty`.
///
/// Fields marked `skip_pb_serializing` are excluded. Returns `None` when the
/// container carries no `pb_struct_type` attribute.
pub fn make_se_token_stream(ast_result: &ASTResult, ast: &ASTContainer) -> Option<TokenStream> {
  let pb_ty = ast.pb_attrs.pb_struct_type()?;
  let struct_ident = &ast.ident;
  // One `pb.xx = ...` / `pb.set_xx(...)` statement per serializable field.
  let build_set_pb_fields = ast
    .data
    .all_fields()
    .filter(|f| !f.pb_attrs.skip_pb_serializing())
    .flat_map(|field| se_token_stream_for_field(ast_result, field, false));
  let se_token_stream: TokenStream = quote! {

    impl std::convert::TryInto<bytes::Bytes> for #struct_ident {
      type Error = ::protobuf::ProtobufError;
      fn try_into(self) -> Result<bytes::Bytes, Self::Error> {
        use protobuf::Message;
        let pb: crate::protobuf::#pb_ty = self.into();
        let bytes = pb.write_to_bytes()?;
        Ok(bytes::Bytes::from(bytes))
      }
    }

    impl std::convert::TryInto<Vec<u8>> for #struct_ident {
      type Error = ::protobuf::ProtobufError;
      fn try_into(self) -> Result<Vec<u8>, Self::Error> {
        use protobuf::Message;
        let pb: crate::protobuf::#pb_ty = self.into();
        let bytes = pb.write_to_bytes()?;
        Ok(bytes)
      }
    }

    impl std::convert::From<#struct_ident> for crate::protobuf::#pb_ty {
      fn from(mut o: #struct_ident) -> crate::protobuf::#pb_ty {
        let mut pb = crate::protobuf::#pb_ty::new();
        #(#build_set_pb_fields)*
        pb
      }
    }
  };
  Some(se_token_stream)
}
/// Generates the serialization statement for one field, honoring the
/// `serialize_pb_with` and `one_of` attributes before falling back to the
/// category-based generator.
fn se_token_stream_for_field(
  ast_result: &ASTResult,
  field: &ASTField,
  _take: bool,
) -> Option<TokenStream> {
  let member = &field.member;
  match field.pb_attrs.serialize_pb_with() {
    // A custom serializer was supplied: call it and assign the result.
    Some(func) => Some(quote! { pb.#member=o.#func(); }),
    None if field.pb_attrs.is_one_of() => token_stream_for_one_of(ast_result, field),
    None => gen_token_stream(ast_result, member, field.ty, false),
  }
}
/// Generates serialization code for a `one_of` (optional) field: the pb
/// setter `set_xx(..)` is only invoked when the Rust-side `Option` holds a
/// value; `None` leaves the pb field untouched.
fn token_stream_for_one_of(ast_result: &ASTResult, field: &ASTField) -> Option<TokenStream> {
  let member = &field.member;
  let ident = get_member_ident(ast_result, member)?;
  let ty_info = match parse_ty(ast_result, field.ty) {
    Ok(info) => info,
    Err(e) => {
      eprintln!(
        "token_stream_for_one_of failed: {:?} with error: {}",
        member, e
      );
      panic!();
    },
  }?;
  // The field is `Option<T>`; dispatch on the bracketed `T`.
  let inner_ty_info = ty_info.bracket_ty_info.as_ref().as_ref();
  let set_func = format_ident!("set_{}", ident.to_string());

  // Protobuf messages and enums go through `into()`; every other category is
  // cloned as-is.
  let token_stream = match ident_category(inner_ty_info.unwrap().ident) {
    TypeCategory::Protobuf | TypeCategory::Enum => quote! {
      match o.#member {
        Some(s) => { pb.#set_func(s.into()) }
        None => {}
      }
    },
    _ => quote! {
      match o.#member {
        Some(ref s) => { pb.#set_func(s.clone()) }
        None => {}
      }
    },
  };
  Some(token_stream)
}
/// Generates the serialization statement(s) for a single field, dispatching
/// on the field's [`TypeCategory`].
///
/// `is_option` is true when this call was reached through an `Option<T>`
/// wrapper (the `TypeCategory::Opt` arm recurses with the inner type).
/// Panics when the field type cannot be parsed.
fn gen_token_stream(
  ast_result: &ASTResult,
  member: &syn::Member,
  ty: &syn::Type,
  is_option: bool,
) -> Option<TokenStream> {
  let ty_info = match parse_ty(ast_result, ty) {
    Ok(ty_info) => ty_info,
    Err(e) => {
      eprintln!("gen_token_stream failed: {:?} with error: {}", member, e);
      panic!();
    },
  }?;
  match ident_category(ty_info.ident) {
    TypeCategory::Array => {
      token_stream_for_vec(ast_result, member, ty_info.bracket_ty_info.unwrap().ty)
    },
    TypeCategory::Map => {
      token_stream_for_map(ast_result, member, ty_info.bracket_ty_info.unwrap().ty)
    },
    TypeCategory::Str => {
      if is_option {
        // Protobuf has no optional string: `None` serializes as "".
        Some(quote! {
          match o.#member {
            Some(ref s) => { pb.#member = s.to_string().clone(); }
            None => { pb.#member = String::new(); }
          }
        })
      } else {
        Some(quote! { pb.#member = o.#member.clone(); })
      }
    },
    TypeCategory::Protobuf => {
      // Nested messages are stored in a SingularPtrField on the pb side.
      Some(quote! { pb.#member = ::protobuf::SingularPtrField::some(o.#member.into()); })
    },
    // Unwrap the `Option` and generate code for the inner type.
    TypeCategory::Opt => gen_token_stream(
      ast_result,
      member,
      ty_info.bracket_ty_info.unwrap().ty,
      true,
    ),
    TypeCategory::Enum => {
      // let pb_enum_ident = format_ident!("{}", ty_info.ident.to_string());
      // Some(quote! {
      //   flowy_protobuf::#pb_enum_ident::from_i32(self.#member.value()).unwrap();
      // })
      Some(quote! {
        pb.#member = o.#member.into();
      })
    },
    // Primitive / Bytes: plain move-assignment.
    _ => Some(quote! { pb.#member = o.#member; }),
  }
}
// e.g. pub cells: Vec<CellData>, the member will be cells, ty would be Vec
/// Generates serialization code for a `Vec<T>` field, where `ty` is the
/// element type `T`.
///
/// * `Protobuf` elements — converted with `into()` and wrapped in a
///   `RepeatedField`.
/// * `Bytes` / `Primitive` — plain clone into the pb field.
/// * anything else — cloned and wrapped in a `RepeatedField`.
fn token_stream_for_vec(
  ast_result: &ASTResult,
  member: &syn::Member,
  ty: &syn::Type,
) -> Option<TokenStream> {
  let ty_info = match parse_ty(ast_result, ty) {
    Ok(ty_info) => ty_info,
    Err(e) => {
      eprintln!(
        "token_stream_for_vec failed: {:?} with error: {}",
        member, e
      );
      panic!();
    },
  }?;
  match ident_category(ty_info.ident) {
    TypeCategory::Protobuf => Some(quote! {
      pb.#member = ::protobuf::RepeatedField::from_vec(
        o.#member
          .into_iter()
          .map(|m| m.into())
          .collect());
    }),
    TypeCategory::Bytes => Some(quote! { pb.#member = o.#member.clone(); }),
    TypeCategory::Primitive => Some(quote! {
      pb.#member = o.#member.clone();
    }),
    _ => Some(quote! {
      pb.#member = ::protobuf::RepeatedField::from_vec(o.#member.clone());
    }),
  }
}
// e.g. pub cells: HashMap<xx, xx>
/// Generates serialization code for a `HashMap<String, V>` field, where `ty`
/// is the value type `V`.
///
/// Protobuf message values are converted with `into()`; every other value
/// type is cloned unchanged.
fn token_stream_for_map(
  ast_result: &ASTResult,
  member: &syn::Member,
  ty: &syn::Type,
) -> Option<TokenStream> {
  // The key of the hashmap must be string
  let ty_info = match parse_ty(ast_result, ty) {
    Ok(ty_info) => ty_info,
    Err(e) => {
      eprintln!(
        "token_stream_for_map failed: {:?} with error: {}",
        member, e
      );
      panic!();
    },
  }?;
  let value_ty = ty_info.ty;
  match ident_category(ty_info.ident) {
    TypeCategory::Protobuf => Some(quote! {
      let mut m: std::collections::HashMap<String, crate::protobuf::#value_ty> = std::collections::HashMap::new();
      o.#member.into_iter().for_each(|(k,v)| {
        m.insert(k.clone(), v.into());
      });
      pb.#member = m;
    }),
    _ => Some(quote! {
      let mut m: std::collections::HashMap<String, #value_ty> = std::collections::HashMap::new();
      o.#member.iter().for_each(|(k,v)| {
        m.insert(k.clone(), v.clone());
      });
      pb.#member = m;
    }),
  }
}

View File

@ -0,0 +1,119 @@
use dashmap::{DashMap, DashSet};
use flowy_ast::{ASTResult, TyInfo};
use flowy_codegen::ProtoCache;
use lazy_static::lazy_static;
use std::fs::File;
use std::io::Read;
use std::sync::atomic::{AtomicBool, Ordering};
use walkdir::WalkDir;
/// Maps a type identifier (e.g. `Vec`, `String`, or a generated protobuf
/// struct/enum name) onto its [`TypeCategory`].
pub fn ident_category(ident: &syn::Ident) -> TypeCategory {
  category_from_str(ident.to_string())
}
/// Returns the identifier of a named struct member.
///
/// Tuple members (`self.0`) are not supported: an error is recorded on
/// `ast_result` and `None` is returned.
pub(crate) fn get_member_ident<'a>(
  ast_result: &ASTResult,
  member: &'a syn::Member,
) -> Option<&'a syn::Ident> {
  match member {
    syn::Member::Named(ident) => Some(ident),
    syn::Member::Unnamed(_) => {
      ast_result.error_spanned_by(
        member,
        "Unsupported member, shouldn't be self.0".to_string(),
      );
      None
    },
  }
}
/// Records an error on `ast_result` when `ty_info` has no bracketed inner
/// type (expected for `Vec<T>` / `HashMap<K, V>` / `Option<T>` fields).
pub fn assert_bracket_ty_is_some(ast_result: &ASTResult, ty_info: &TyInfo) {
  if ty_info.bracket_ty_info.is_some() {
    return;
  }
  ast_result.error_spanned_by(
    ty_info.ty,
    "Invalid bracketed type when gen de token steam".to_string(),
  );
}
lazy_static! {
  // Set of file paths that have been read; not referenced by the visible
  // code in this module — NOTE(review): possibly dead, confirm before removal.
  static ref READ_FLAG: DashSet<String> = DashSet::new();
  // Protobuf struct/enum names loaded from the codegen `.cache` files, keyed
  // by `TypeCategory::Protobuf` / `TypeCategory::Enum`.
  static ref CACHE_INFO: DashMap<TypeCategory, Vec<String>> = DashMap::new();
  // Whether the `.cache` files have been loaded (flipped once by
  // `category_from_str` on first use).
  static ref IS_LOAD: AtomicBool = AtomicBool::new(false);
}
/// Classification of a field's Rust type, used to pick the code-generation
/// strategy for protobuf (de)serialization.
#[derive(Eq, Hash, PartialEq)]
pub enum TypeCategory {
  // `Vec<T>` (repeated field)
  Array,
  // `HashMap<String, V>` (map field)
  Map,
  // `String`
  Str,
  // A generated protobuf message type (loaded from the codegen cache)
  Protobuf,
  // `u8` / `Vec<u8>` payloads
  Bytes,
  // A generated protobuf enum type (loaded from the codegen cache)
  Enum,
  // `Option<T>`
  Opt,
  // Everything else (numbers, bool, ...)
  Primitive,
}
// auto generate, do not edit
/// Classifies a type name into a [`TypeCategory`].
///
/// On first use this lazily loads the `proto_cache` JSON files produced by
/// `flowy-codegen` so user-defined protobuf struct/enum names are recognized;
/// everything else falls back to a fixed name-based mapping.
pub fn category_from_str(type_str: String) -> TypeCategory {
  // Claim the one-time load atomically. The previous `load` + `store` pair
  // was racy: two concurrent callers could both observe `false` and append
  // duplicate entries to CACHE_INFO. `compare_exchange` lets exactly one
  // caller through.
  if IS_LOAD
    .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
    .is_ok()
  {
    // Dependents on another crate file is not good, just leave it here.
    // Maybe find another way to read the .cache in the future.
    let cache_dir = format!("{}/../flowy-codegen/.cache", env!("CARGO_MANIFEST_DIR"));
    for path in WalkDir::new(cache_dir)
      .into_iter()
      .filter_map(|e| e.ok())
      .filter(|e| e.path().file_stem().unwrap().to_str().unwrap() == "proto_cache")
      .map(|e| e.path().to_str().unwrap().to_string())
    {
      if let Some(s) = read_file(&path) {
        let cache: ProtoCache = serde_json::from_str(&s).unwrap();
        CACHE_INFO
          .entry(TypeCategory::Protobuf)
          .or_default()
          .extend(cache.structs);
        CACHE_INFO
          .entry(TypeCategory::Enum)
          .or_default()
          .extend(cache.enums);
      }
    }
  }

  // User-defined protobuf types take precedence over the name-based mapping.
  if let Some(protobuf_tys) = CACHE_INFO.get(&TypeCategory::Protobuf) {
    if protobuf_tys.contains(&type_str) {
      return TypeCategory::Protobuf;
    }
  }
  if let Some(enum_tys) = CACHE_INFO.get(&TypeCategory::Enum) {
    if enum_tys.contains(&type_str) {
      return TypeCategory::Enum;
    }
  }

  match type_str.as_str() {
    "Vec" => TypeCategory::Array,
    "HashMap" => TypeCategory::Map,
    "u8" => TypeCategory::Bytes,
    "String" => TypeCategory::Str,
    "Option" => TypeCategory::Opt,
    _ => TypeCategory::Primitive,
  }
}
/// Reads the entire file at `path` into a `String`.
///
/// Returns `None` on any I/O failure (missing file, permission error,
/// non-UTF-8 content, ...) — identical contract to the previous hand-rolled
/// `File::open` + `read_to_string` loop, but via the standard one-shot helper.
fn read_file(path: &str) -> Option<String> {
  std::fs::read_to_string(path).ok()
}

View File

@ -0,0 +1,5 @@
// Harness for trybuild compile-pass tests of the derive macros; individual
// cases are enabled by uncommenting the `t.pass(...)` lines below.
#[tokio::test]
async fn tests() {
  let _t = trybuild::TestCases::new();
  // t.pass("tests/protobuf_enum.rs");
}