chore: rename

nathan 2022-12-01 16:03:03 +08:00
parent 44f9254c0a
commit fa01dbb572
21 changed files with 209 additions and 159 deletions

View File

@ -830,6 +830,7 @@ dependencies = [
"phf 0.8.0",
"protoc-bin-vendored",
"protoc-rust",
"quote",
"serde",
"serde_json",
"similar",

View File

@ -1,6 +1,6 @@
use crate::grid::filter_test::script::FilterScript::*;
use crate::grid::filter_test::script::GridFilterTest;
use flowy_grid::entities::{ChecklistFilterCondition, SelectOptionCondition};
use flowy_grid::entities::ChecklistFilterCondition;
#[tokio::test]
async fn grid_filter_checklist_is_incomplete_test() {

shared-lib/Cargo.lock (generated, 47 changed lines)
View File

@ -375,14 +375,38 @@ dependencies = [
"syn",
]
[[package]]
name = "flowy-codegen"
version = "0.1.0"
dependencies = [
"cmd_lib",
"console",
"fancy-regex",
"flowy-ast",
"itertools",
"lazy_static",
"log",
"phf 0.8.0",
"protoc-bin-vendored",
"protoc-rust",
"quote",
"serde",
"serde_json",
"similar",
"syn",
"tera",
"toml",
"walkdir",
]
[[package]]
name = "flowy-derive"
version = "0.1.0"
dependencies = [
"dashmap",
"flowy-ast",
"flowy-codegen",
"lazy_static",
"lib-infra",
"log",
"proc-macro2",
"quote",
@ -398,8 +422,8 @@ name = "flowy-error-code"
version = "0.1.0"
dependencies = [
"derive_more",
"flowy-codegen",
"flowy-derive",
"lib-infra",
"protobuf",
]
@ -408,6 +432,7 @@ name = "flowy-http-model"
version = "0.1.0"
dependencies = [
"bytes",
"flowy-codegen",
"flowy-derive",
"lib-infra",
"md5",
@ -785,27 +810,10 @@ version = "0.1.0"
dependencies = [
"bytes",
"chrono",
"cmd_lib",
"console",
"fancy-regex",
"flowy-ast",
"futures-core",
"itertools",
"lazy_static",
"log",
"phf 0.8.0",
"pin-project",
"protoc-bin-vendored",
"protoc-rust",
"rand 0.8.5",
"serde",
"serde_json",
"similar",
"syn",
"tera",
"tokio",
"toml",
"walkdir",
]
[[package]]
@ -839,6 +847,7 @@ dependencies = [
"bytes",
"dashmap",
"env_logger",
"flowy-codegen",
"flowy-derive",
"futures",
"futures-channel",

View File

@ -1,7 +1,7 @@
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![allow(unused_assignments)]
use crate::{attr, ty_ext::*, AttrsContainer, Ctxt};
use crate::{attr, ty_ext::*, ASTResult, AttrsContainer};
use syn::{self, punctuated::Punctuated};
pub struct ASTContainer<'a> {
@ -14,8 +14,8 @@ pub struct ASTContainer<'a> {
}
impl<'a> ASTContainer<'a> {
pub fn from_ast(cx: &Ctxt, ast: &'a syn::DeriveInput) -> Option<ASTContainer<'a>> {
let attrs = AttrsContainer::from_ast(cx, ast);
pub fn from_ast(ast_result: &ASTResult, ast: &'a syn::DeriveInput) -> Option<ASTContainer<'a>> {
let attrs = AttrsContainer::from_ast(ast_result, ast);
// syn::DeriveInput
// 1. syn::DataUnion
// 2. syn::DataStruct
@ -23,16 +23,16 @@ impl<'a> ASTContainer<'a> {
let data = match &ast.data {
syn::Data::Struct(data) => {
// https://docs.rs/syn/1.0.48/syn/struct.DataStruct.html
let (style, fields) = struct_from_ast(cx, &data.fields);
let (style, fields) = struct_from_ast(ast_result, &data.fields);
ASTData::Struct(style, fields)
}
syn::Data::Union(_) => {
cx.error_spanned_by(ast, "Does not support derive for unions");
ast_result.error_spanned_by(ast, "Does not support derive for unions");
return None;
}
syn::Data::Enum(data) => {
// https://docs.rs/syn/1.0.48/syn/struct.DataEnum.html
ASTData::Enum(enum_from_ast(cx, &ast.ident, &data.variants, &ast.attrs))
ASTData::Enum(enum_from_ast(ast_result, &ast.ident, &data.variants, &ast.attrs))
}
};
@ -115,7 +115,7 @@ pub struct ASTField<'a> {
}
impl<'a> ASTField<'a> {
pub fn new(cx: &Ctxt, field: &'a syn::Field, index: usize) -> Result<Self, String> {
pub fn new(cx: &ASTResult, field: &'a syn::Field, index: usize) -> Result<Self, String> {
let mut bracket_inner_ty = None;
let mut bracket_ty = None;
let mut bracket_category = Some(BracketCategory::Other);
@ -202,7 +202,7 @@ pub enum ASTStyle {
Unit,
}
pub fn struct_from_ast<'a>(cx: &Ctxt, fields: &'a syn::Fields) -> (ASTStyle, Vec<ASTField<'a>>) {
pub fn struct_from_ast<'a>(cx: &ASTResult, fields: &'a syn::Fields) -> (ASTStyle, Vec<ASTField<'a>>) {
match fields {
syn::Fields::Named(fields) => (ASTStyle::Struct, fields_from_ast(cx, &fields.named)),
syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
@ -214,7 +214,7 @@ pub fn struct_from_ast<'a>(cx: &Ctxt, fields: &'a syn::Fields) -> (ASTStyle, Vec
}
pub fn enum_from_ast<'a>(
cx: &Ctxt,
cx: &ASTResult,
ident: &syn::Ident,
variants: &'a Punctuated<syn::Variant, Token![,]>,
enum_attrs: &[syn::Attribute],
@ -235,7 +235,7 @@ pub fn enum_from_ast<'a>(
.collect()
}
fn fields_from_ast<'a>(cx: &Ctxt, fields: &'a Punctuated<syn::Field, Token![,]>) -> Vec<ASTField<'a>> {
fn fields_from_ast<'a>(cx: &ASTResult, fields: &'a Punctuated<syn::Field, Token![,]>) -> Vec<ASTField<'a>> {
fields
.iter()
.enumerate()

View File

@ -1,5 +1,5 @@
#![allow(clippy::all)]
use crate::{symbol::*, Ctxt};
use crate::{symbol::*, ASTResult};
use quote::ToTokens;
use syn::{
self,
@ -19,41 +19,46 @@ pub struct AttrsContainer {
impl AttrsContainer {
/// Extract out the `#[pb(...)]` attributes from an item.
pub fn from_ast(cx: &Ctxt, item: &syn::DeriveInput) -> Self {
let mut pb_struct_type = ASTAttr::none(cx, PB_STRUCT);
let mut pb_enum_type = ASTAttr::none(cx, PB_ENUM);
for meta_item in item.attrs.iter().flat_map(|attr| get_meta_items(cx, attr)).flatten() {
pub fn from_ast(ast_result: &ASTResult, item: &syn::DeriveInput) -> Self {
let mut pb_struct_type = ASTAttr::none(ast_result, PB_STRUCT);
let mut pb_enum_type = ASTAttr::none(ast_result, PB_ENUM);
for meta_item in item
.attrs
.iter()
.flat_map(|attr| get_meta_items(ast_result, attr))
.flatten()
{
match &meta_item {
// Parse `#[pb(struct = "Type")]
Meta(NameValue(m)) if m.path == PB_STRUCT => {
if let Ok(into_ty) = parse_lit_into_ty(cx, PB_STRUCT, &m.lit) {
if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_STRUCT, &m.lit) {
pb_struct_type.set_opt(&m.path, Some(into_ty));
}
}
// Parse `#[pb(enum = "Type")]
Meta(NameValue(m)) if m.path == PB_ENUM => {
if let Ok(into_ty) = parse_lit_into_ty(cx, PB_ENUM, &m.lit) {
if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_ENUM, &m.lit) {
pb_enum_type.set_opt(&m.path, Some(into_ty));
}
}
Meta(meta_item) => {
let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
cx.error_spanned_by(meta_item.path(), format!("unknown pb container attribute `{}`", path));
ast_result.error_spanned_by(meta_item.path(), format!("unknown pb container attribute `{}`", path));
}
Lit(lit) => {
cx.error_spanned_by(lit, "unexpected literal in pb container attribute");
ast_result.error_spanned_by(lit, "unexpected literal in pb container attribute");
}
}
}
match &item.data {
syn::Data::Struct(_) => {
pb_struct_type.set_if_none(default_pb_type(&cx, &item.ident));
pb_struct_type.set_if_none(default_pb_type(&ast_result, &item.ident));
}
syn::Data::Enum(_) => {
pb_enum_type.set_if_none(default_pb_type(&cx, &item.ident));
pb_enum_type.set_if_none(default_pb_type(&ast_result, &item.ident));
}
_ => {}
}
@ -75,16 +80,16 @@ impl AttrsContainer {
}
struct ASTAttr<'c, T> {
cx: &'c Ctxt,
ast_result: &'c ASTResult,
name: Symbol,
tokens: TokenStream,
value: Option<T>,
}
impl<'c, T> ASTAttr<'c, T> {
fn none(cx: &'c Ctxt, name: Symbol) -> Self {
fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
ASTAttr {
cx,
ast_result,
name,
tokens: TokenStream::new(),
value: None,
@ -95,7 +100,7 @@ impl<'c, T> ASTAttr<'c, T> {
let tokens = obj.into_token_stream();
if self.value.is_some() {
self.cx
self.ast_result
.error_spanned_by(tokens, format!("duplicate attribute `{}`", self.name));
} else {
self.tokens = tokens;
@ -141,20 +146,25 @@ pub struct ASTAttrField {
impl ASTAttrField {
/// Extract out the `#[pb(...)]` attributes from a struct field.
pub fn from_ast(cx: &Ctxt, index: usize, field: &syn::Field) -> Self {
let mut pb_index = ASTAttr::none(cx, PB_INDEX);
let mut pb_one_of = BoolAttr::none(cx, PB_ONE_OF);
let mut serialize_with = ASTAttr::none(cx, SERIALIZE_WITH);
let mut skip_serializing = BoolAttr::none(cx, SKIP_SERIALIZING);
let mut deserialize_with = ASTAttr::none(cx, DESERIALIZE_WITH);
let mut skip_deserializing = BoolAttr::none(cx, SKIP_DESERIALIZING);
pub fn from_ast(ast_result: &ASTResult, index: usize, field: &syn::Field) -> Self {
let mut pb_index = ASTAttr::none(ast_result, PB_INDEX);
let mut pb_one_of = BoolAttr::none(ast_result, PB_ONE_OF);
let mut serialize_with = ASTAttr::none(ast_result, SERIALIZE_WITH);
let mut skip_serializing = BoolAttr::none(ast_result, SKIP_SERIALIZING);
let mut deserialize_with = ASTAttr::none(ast_result, DESERIALIZE_WITH);
let mut skip_deserializing = BoolAttr::none(ast_result, SKIP_DESERIALIZING);
let ident = match &field.ident {
Some(ident) => ident.to_string(),
None => index.to_string(),
};
for meta_item in field.attrs.iter().flat_map(|attr| get_meta_items(cx, attr)).flatten() {
for meta_item in field
.attrs
.iter()
.flat_map(|attr| get_meta_items(ast_result, attr))
.flatten()
{
match &meta_item {
// Parse `#[pb(skip)]`
Meta(Path(word)) if word == SKIP => {
@ -176,25 +186,25 @@ impl ASTAttrField {
// Parse `#[pb(serialize_with = "...")]`
Meta(NameValue(m)) if m.path == SERIALIZE_WITH => {
if let Ok(path) = parse_lit_into_expr_path(cx, SERIALIZE_WITH, &m.lit) {
if let Ok(path) = parse_lit_into_expr_path(ast_result, SERIALIZE_WITH, &m.lit) {
serialize_with.set(&m.path, path);
}
}
// Parse `#[pb(deserialize_with = "...")]`
Meta(NameValue(m)) if m.path == DESERIALIZE_WITH => {
if let Ok(path) = parse_lit_into_expr_path(cx, DESERIALIZE_WITH, &m.lit) {
if let Ok(path) = parse_lit_into_expr_path(ast_result, DESERIALIZE_WITH, &m.lit) {
deserialize_with.set(&m.path, path);
}
}
Meta(meta_item) => {
let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
cx.error_spanned_by(meta_item.path(), format!("unknown field attribute `{}`", path));
ast_result.error_spanned_by(meta_item.path(), format!("unknown field attribute `{}`", path));
}
Lit(lit) => {
cx.error_spanned_by(lit, "unexpected literal in pb field attribute");
ast_result.error_spanned_by(lit, "unexpected literal in pb field attribute");
}
}
}
@ -262,7 +272,12 @@ pub struct ASTEnumAttrVariant {
}
impl ASTEnumAttrVariant {
pub fn from_ast(ctxt: &Ctxt, ident: &syn::Ident, variant: &syn::Variant, enum_attrs: &[syn::Attribute]) -> Self {
pub fn from_ast(
ast_result: &ASTResult,
ident: &syn::Ident,
variant: &syn::Variant,
enum_attrs: &[syn::Attribute],
) -> Self {
let enum_item_name = variant.ident.to_string();
let enum_name = ident.to_string();
let mut value = String::new();
@ -276,7 +291,7 @@ impl ASTEnumAttrVariant {
value = lit_int.base10_digits().to_string();
}
}
let event_attrs = get_event_attrs_from(ctxt, &variant.attrs, enum_attrs);
let event_attrs = get_event_attrs_from(ast_result, &variant.attrs, enum_attrs);
ASTEnumAttrVariant {
enum_name,
enum_item_name,
@ -298,7 +313,11 @@ impl ASTEnumAttrVariant {
}
}
fn get_event_attrs_from(ctxt: &Ctxt, variant_attrs: &[syn::Attribute], enum_attrs: &[syn::Attribute]) -> EventAttrs {
fn get_event_attrs_from(
ast_result: &ASTResult,
variant_attrs: &[syn::Attribute],
enum_attrs: &[syn::Attribute],
) -> EventAttrs {
let mut event_attrs = EventAttrs {
input: None,
output: None,
@ -327,7 +346,8 @@ fn get_event_attrs_from(ctxt: &Ctxt, variant_attrs: &[syn::Attribute], enum_attr
if let syn::Lit::Str(s) = &name_value.lit {
let input_type = parse_lit_str(s)
.map_err(|_| {
ctxt.error_spanned_by(s, format!("failed to parse request deserializer {:?}", s.value()))
ast_result
.error_spanned_by(s, format!("failed to parse request deserializer {:?}", s.value()))
})
.unwrap();
event_attrs.input = Some(input_type);
@ -338,7 +358,8 @@ fn get_event_attrs_from(ctxt: &Ctxt, variant_attrs: &[syn::Attribute], enum_attr
if let syn::Lit::Str(s) = &name_value.lit {
let output_type = parse_lit_str(s)
.map_err(|_| {
ctxt.error_spanned_by(s, format!("failed to parse response deserializer {:?}", s.value()))
ast_result
.error_spanned_by(s, format!("failed to parse response deserializer {:?}", s.value()))
})
.unwrap();
event_attrs.output = Some(output_type);
@ -350,13 +371,13 @@ fn get_event_attrs_from(ctxt: &Ctxt, variant_attrs: &[syn::Attribute], enum_attr
event_attrs.ignore = true;
}
}
Lit(s) => ctxt.error_spanned_by(s, "unexpected attribute"),
_ => ctxt.error_spanned_by(meta_item, "unexpected attribute"),
Lit(s) => ast_result.error_spanned_by(s, "unexpected attribute"),
_ => ast_result.error_spanned_by(meta_item, "unexpected attribute"),
};
let attr_meta_items_info = variant_attrs
.iter()
.flat_map(|attr| match get_meta_items(ctxt, attr) {
.flat_map(|attr| match get_meta_items(ast_result, attr) {
Ok(items) => Some((attr, items)),
Err(_) => None,
})
@ -372,7 +393,7 @@ fn get_event_attrs_from(ctxt: &Ctxt, variant_attrs: &[syn::Attribute], enum_attr
event_attrs
}
pub fn get_meta_items(cx: &Ctxt, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
pub fn get_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
if attr.path != PB_ATTRS && attr.path != EVENT {
return Ok(vec![]);
}
@ -392,16 +413,17 @@ pub fn get_meta_items(cx: &Ctxt, attr: &syn::Attribute) -> Result<Vec<syn::Neste
}
}
fn parse_lit_into_expr_path(cx: &Ctxt, attr_name: Symbol, lit: &syn::Lit) -> Result<syn::ExprPath, ()> {
let string = get_lit_str(cx, attr_name, lit)?;
parse_lit_str(string).map_err(|_| cx.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value())))
fn parse_lit_into_expr_path(ast_result: &ASTResult, attr_name: Symbol, lit: &syn::Lit) -> Result<syn::ExprPath, ()> {
let string = get_lit_str(ast_result, attr_name, lit)?;
parse_lit_str(string)
.map_err(|_| ast_result.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value())))
}
fn get_lit_str<'a>(cx: &Ctxt, attr_name: Symbol, lit: &'a syn::Lit) -> Result<&'a syn::LitStr, ()> {
fn get_lit_str<'a>(ast_result: &ASTResult, attr_name: Symbol, lit: &'a syn::Lit) -> Result<&'a syn::LitStr, ()> {
if let syn::Lit::Str(lit) = lit {
Ok(lit)
} else {
cx.error_spanned_by(
ast_result.error_spanned_by(
lit,
format!(
"expected pb {} attribute to be a string: `{} = \"...\"`",
@ -412,11 +434,11 @@ fn get_lit_str<'a>(cx: &Ctxt, attr_name: Symbol, lit: &'a syn::Lit) -> Result<&'
}
}
fn parse_lit_into_ty(cx: &Ctxt, attr_name: Symbol, lit: &syn::Lit) -> Result<syn::Type, ()> {
let string = get_lit_str(cx, attr_name, lit)?;
fn parse_lit_into_ty(ast_result: &ASTResult, attr_name: Symbol, lit: &syn::Lit) -> Result<syn::Type, ()> {
let string = get_lit_str(ast_result, attr_name, lit)?;
parse_lit_str(string).map_err(|_| {
cx.error_spanned_by(
ast_result.error_spanned_by(
lit,
format!("failed to parse type: {} = {:?}", attr_name, string.value()),
)
@ -448,7 +470,7 @@ fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
token
}
fn default_pb_type(ctxt: &Ctxt, ident: &syn::Ident) -> syn::Type {
fn default_pb_type(ast_result: &ASTResult, ident: &syn::Ident) -> syn::Type {
let take_ident = ident.to_string();
let lit_str = syn::LitStr::new(&take_ident, ident.span());
if let Ok(tokens) = spanned_tokens(&lit_str) {
@ -456,7 +478,7 @@ fn default_pb_type(ctxt: &Ctxt, ident: &syn::Ident) -> syn::Type {
return pb_struct_ty;
}
}
ctxt.error_spanned_by(ident, format!("❌ Can't find {} protobuf struct", take_ident));
ast_result.error_spanned_by(ident, format!("❌ Can't find {} protobuf struct", take_ident));
panic!()
}
@ -494,8 +516,8 @@ pub fn ungroup(mut ty: &syn::Type) -> &syn::Type {
struct BoolAttr<'c>(ASTAttr<'c, ()>);
impl<'c> BoolAttr<'c> {
fn none(cx: &'c Ctxt, name: Symbol) -> Self {
BoolAttr(ASTAttr::none(cx, name))
fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
BoolAttr(ASTAttr::none(ast_result, name))
}
fn set_true<A: ToTokens>(&mut self, obj: A) {
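Note: for orientation, a hedged sketch of the nested-meta extraction that get_meta_items performs on a #[pb(...)] attribute, written against the syn 1.x API used in this file. The literal "pb" path check stands in for the PB_ATTRS symbol and is an assumption; the real function also accepts the event attribute and reports parse failures through ASTResult.

use syn::{Attribute, Meta, NestedMeta};

fn pb_meta_items(attr: &Attribute) -> Vec<NestedMeta> {
    // Ignore every attribute other than `#[pb(...)]` (PB_ATTRS, assumed to be "pb").
    if !attr.path.is_ident("pb") {
        return Vec::new();
    }
    match attr.parse_meta() {
        // `#[pb(index = 1, one_of)]` parses as a Meta::List with nested items.
        Ok(Meta::List(list)) => list.nested.into_iter().collect(),
        // Bare `#[pb]` or unparsable input carries no nested items here.
        _ => Vec::new(),
    }
}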

View File

@ -2,13 +2,13 @@ use quote::ToTokens;
use std::{cell::RefCell, fmt::Display, thread};
#[derive(Default)]
pub struct Ctxt {
pub struct ASTResult {
errors: RefCell<Option<Vec<syn::Error>>>,
}
impl Ctxt {
impl ASTResult {
pub fn new() -> Self {
Ctxt {
ASTResult {
errors: RefCell::new(Some(Vec::new())),
}
}
@ -34,7 +34,7 @@ impl Ctxt {
}
}
impl Drop for Ctxt {
impl Drop for ASTResult {
fn drop(&mut self) {
if !thread::panicking() && self.errors.borrow().is_some() {
panic!("forgot to check for errors");
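Note: the hunk above only shows the renamed identifiers. For context, here is a minimal, self-contained sketch of the ASTResult error collector; the method bodies elided from the diff are reconstructed on the assumption that they follow the usual serde-style Ctxt pattern (collect errors during traversal, drain them with check(), panic on drop if check() was never called).

use quote::ToTokens;
use std::{cell::RefCell, fmt::Display, thread};

#[derive(Default)]
pub struct ASTResult {
    // `Some` while errors may still be reported; `None` once check() has run.
    errors: RefCell<Option<Vec<syn::Error>>>,
}

impl ASTResult {
    pub fn new() -> Self {
        ASTResult { errors: RefCell::new(Some(Vec::new())) }
    }

    // Record an error whose span points at the tokens of `obj`.
    pub fn error_spanned_by<A: ToTokens, T: Display>(&self, obj: A, msg: T) {
        self.errors
            .borrow_mut()
            .as_mut()
            .unwrap()
            .push(syn::Error::new_spanned(obj.into_token_stream(), msg));
    }

    // Consume the collected errors; must be called exactly once before drop.
    pub fn check(self) -> Result<(), Vec<syn::Error>> {
        let errors = self.errors.borrow_mut().take().unwrap();
        if errors.is_empty() { Ok(()) } else { Err(errors) }
    }
}

impl Drop for ASTResult {
    fn drop(&mut self) {
        if !thread::panicking() && self.errors.borrow().is_some() {
            panic!("forgot to check for errors");
        }
    }
}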

View File

@ -8,10 +8,9 @@ mod ast;
mod attr;
mod ctxt;
pub mod event_ast;
pub mod symbol;
pub mod ty_ext;
pub use self::{symbol::*, ty_ext::*};
pub use ast::*;
pub use attr::*;
pub use ctxt::Ctxt;
pub use ctxt::ASTResult;

View File

@ -1,4 +1,4 @@
use crate::Ctxt;
use crate::ASTResult;
use syn::{self, AngleBracketedGenericArguments, PathSegment};
#[derive(Eq, PartialEq, Debug)]
@ -41,7 +41,7 @@ impl<'a> TyInfo<'a> {
}
}
pub fn parse_ty<'a>(ctxt: &Ctxt, ty: &'a syn::Type) -> Result<Option<TyInfo<'a>>, String> {
pub fn parse_ty<'a>(ast_result: &ASTResult, ty: &'a syn::Type) -> Result<Option<TyInfo<'a>>, String> {
// Type -> TypePath -> Path -> PathSegment -> PathArguments ->
// AngleBracketedGenericArguments -> GenericArgument -> Type.
if let syn::Type::Path(ref p) = ty {
@ -58,9 +58,9 @@ pub fn parse_ty<'a>(ctxt: &Ctxt, ty: &'a syn::Type) -> Result<Option<TyInfo<'a>>
return if let syn::PathArguments::AngleBracketed(ref bracketed) = seg.arguments {
match seg.ident.to_string().as_ref() {
"HashMap" => generate_hashmap_ty_info(ctxt, ty, seg, bracketed),
"Vec" => generate_vec_ty_info(ctxt, seg, bracketed),
"Option" => generate_option_ty_info(ctxt, ty, seg, bracketed),
"HashMap" => generate_hashmap_ty_info(ast_result, ty, seg, bracketed),
"Vec" => generate_vec_ty_info(ast_result, seg, bracketed),
"Option" => generate_option_ty_info(ast_result, ty, seg, bracketed),
_ => {
return Err(format!("Unsupported ty {}", seg.ident));
}
@ -92,7 +92,7 @@ fn parse_bracketed(bracketed: &AngleBracketedGenericArguments) -> Vec<&syn::Type
}
pub fn generate_hashmap_ty_info<'a>(
ctxt: &Ctxt,
ast_result: &ASTResult,
ty: &'a syn::Type,
path_segment: &'a PathSegment,
bracketed: &'a AngleBracketedGenericArguments,
@ -102,9 +102,9 @@ pub fn generate_hashmap_ty_info<'a>(
return Ok(None);
}
let types = parse_bracketed(bracketed);
let key = parse_ty(ctxt, types[0])?.unwrap().ident.to_string();
let value = parse_ty(ctxt, types[1])?.unwrap().ident.to_string();
let bracket_ty_info = Box::new(parse_ty(ctxt, types[1])?);
let key = parse_ty(ast_result, types[0])?.unwrap().ident.to_string();
let value = parse_ty(ast_result, types[1])?.unwrap().ident.to_string();
let bracket_ty_info = Box::new(parse_ty(ast_result, types[1])?);
Ok(Some(TyInfo {
ident: &path_segment.ident,
ty,
@ -114,14 +114,14 @@ pub fn generate_hashmap_ty_info<'a>(
}
fn generate_option_ty_info<'a>(
ctxt: &Ctxt,
ast_result: &ASTResult,
ty: &'a syn::Type,
path_segment: &'a PathSegment,
bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
assert_eq!(path_segment.ident.to_string(), "Option".to_string());
let types = parse_bracketed(bracketed);
let bracket_ty_info = Box::new(parse_ty(ctxt, types[0])?);
let bracket_ty_info = Box::new(parse_ty(ast_result, types[0])?);
Ok(Some(TyInfo {
ident: &path_segment.ident,
ty,
@ -131,7 +131,7 @@ fn generate_option_ty_info<'a>(
}
fn generate_vec_ty_info<'a>(
ctxt: &Ctxt,
ast_result: &ASTResult,
path_segment: &'a PathSegment,
bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
@ -139,7 +139,7 @@ fn generate_vec_ty_info<'a>(
return Ok(None);
}
if let syn::GenericArgument::Type(ref bracketed_type) = bracketed.args.first().unwrap() {
let bracketed_ty_info = Box::new(parse_ty(ctxt, bracketed_type)?);
let bracketed_ty_info = Box::new(parse_ty(ast_result, bracketed_type)?);
return Ok(Some(TyInfo {
ident: &path_segment.ident,
ty: bracketed_type,
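Note: a small, runnable sketch of the traversal described by the comment near the top of this file (Type -> TypePath -> PathSegment -> AngleBracketedGenericArguments -> GenericArgument -> Type). The helper name is illustrative and not part of this crate; it peels one level of brackets the way parse_ty does for Vec, Option and HashMap.

// Return the first bracketed inner type of e.g. `Vec<String>` or `Option<i64>`.
fn first_bracketed_inner(ty: &syn::Type) -> Option<&syn::Type> {
    if let syn::Type::Path(type_path) = ty {
        let seg = type_path.path.segments.last()?;
        if let syn::PathArguments::AngleBracketed(bracketed) = &seg.arguments {
            for arg in &bracketed.args {
                if let syn::GenericArgument::Type(inner) = arg {
                    return Some(inner);
                }
            }
        }
    }
    None
}

// Usage (requires syn's parsing support):
// let ty: syn::Type = syn::parse_str("Vec<String>").unwrap();
// assert!(first_bracketed_inner(&ty).is_some());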

View File

@ -10,6 +10,7 @@ log = "0.4.14"
serde = { version = "1.0", features = ["derive"]}
serde_json = "1.0"
flowy-ast = { path = "../flowy-ast"}
quote = "1.0"
cmd_lib = { version = "1", optional = true }
protoc-rust = { version = "2", optional = true }
@ -27,7 +28,6 @@ toml = {version = "0.5.8", optional = true}
[features]
proto_gen = [
"similar",

View File

@ -1,4 +1,4 @@
use crate::ASTEnumAttrVariant;
use flowy_ast::ASTEnumAttrVariant;
pub struct EventASTContext {
pub event: syn::Ident,

View File

@ -1,7 +1,8 @@
use super::event_template::*;
use crate::dart_event::ast::EventASTContext;
use crate::flowy_toml::{parse_crate_config_from, CrateConfig};
use crate::util::{is_crate_dir, is_hidden, path_string_with_component, read_file};
use flowy_ast::{event_ast::*, *};
use flowy_ast::ASTResult;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
@ -117,10 +118,14 @@ pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
.iter()
.map(|item| match item {
Item::Enum(item_enum) => {
let ctxt = Ctxt::new();
let attrs =
flowy_ast::enum_from_ast(&ctxt, &item_enum.ident, &item_enum.variants, &item_enum.attrs);
ctxt.check().unwrap();
let ast_result = ASTResult::new();
let attrs = flowy_ast::enum_from_ast(
&ast_result,
&item_enum.ident,
&item_enum.variants,
&item_enum.attrs,
);
ast_result.check().unwrap();
attrs
.iter()
.filter(|attr| !attr.attrs.event_attrs.ignore)
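Note: a hedged sketch of the scan performed by parse_event_crate above: parse a source file with syn (the "full" feature is needed for syn::parse_file) and keep only the enum items, which are then fed to flowy_ast::enum_from_ast and filtered on event_attrs.ignore. The helper below is illustrative, not part of the crate.

fn collect_event_enums(source: &str) -> Vec<syn::ItemEnum> {
    let file = syn::parse_file(source).expect("invalid Rust source");
    file.items
        .into_iter()
        .filter_map(|item| match item {
            // Only enums carry the `#[event(...)]` variants of interest.
            syn::Item::Enum(item_enum) => Some(item_enum),
            _ => None,
        })
        .collect()
}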

View File

@ -1,4 +1,5 @@
#![allow(clippy::module_inception)]
mod ast;
mod dart_event;
mod event_template;

View File

@ -15,3 +15,6 @@ pub struct ProtoCache {
pub structs: Vec<String>,
pub enums: Vec<String>,
}
#[macro_use]
extern crate quote;

View File

@ -115,11 +115,11 @@ pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
// let mut content = format!("{:#?}", &ast);
// let mut file = File::create("./foo.txt").unwrap();
// file.write_all(content.as_bytes()).unwrap();
let ctxt = Ctxt::new();
let ast_result = ASTResult::new();
let mut proto_structs: Vec<Struct> = vec![];
ast.items.iter().for_each(|item| {
if let Item::Struct(item_struct) = item {
let (_, fields) = struct_from_ast(&ctxt, &item_struct.fields);
let (_, fields) = struct_from_ast(&ast_result, &item_struct.fields);
if fields.iter().filter(|f| f.attrs.pb_index().is_some()).count() > 0 {
proto_structs.push(Struct {
@ -129,25 +129,25 @@ pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
}
}
});
ctxt.check().unwrap();
ast_result.check().unwrap();
proto_structs
}
pub fn get_ast_enums(ast: &syn::File) -> Vec<FlowyEnum> {
let mut flowy_enums: Vec<FlowyEnum> = vec![];
let ctxt = Ctxt::new();
let ast_result = ASTResult::new();
ast.items.iter().for_each(|item| {
// https://docs.rs/syn/1.0.54/syn/enum.Item.html
if let Item::Enum(item_enum) = item {
let attrs = flowy_ast::enum_from_ast(&ctxt, &item_enum.ident, &item_enum.variants, &ast.attrs);
let attrs = flowy_ast::enum_from_ast(&ast_result, &item_enum.ident, &item_enum.variants, &ast.attrs);
flowy_enums.push(FlowyEnum {
name: item_enum.ident.to_string(),
attrs,
});
}
});
ctxt.check().unwrap();
ast_result.check().unwrap();
flowy_enums
}

View File

@ -2,7 +2,7 @@ use crate::proto_buf::util::*;
use flowy_ast::*;
use proc_macro2::{Span, TokenStream};
pub fn make_de_token_steam(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStream> {
pub fn make_de_token_steam(ctxt: &ASTResult, ast: &ASTContainer) -> Option<TokenStream> {
let pb_ty = ast.attrs.pb_struct_type()?;
let struct_ident = &ast.ident;
@ -58,7 +58,7 @@ pub fn make_de_token_steam(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStrea
// None
}
fn token_stream_for_one_of(ctxt: &Ctxt, field: &ASTField) -> Option<TokenStream> {
fn token_stream_for_one_of(ctxt: &ASTResult, field: &ASTField) -> Option<TokenStream> {
let member = &field.member;
let ident = get_member_ident(ctxt, member)?;
let ty_info = match parse_ty(ctxt, field.ty) {
@ -118,9 +118,14 @@ fn token_stream_for_one_of(ctxt: &Ctxt, field: &ASTField) -> Option<TokenStream>
}
}
fn token_stream_for_field(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_option: bool) -> Option<TokenStream> {
let ident = get_member_ident(ctxt, member)?;
let ty_info = match parse_ty(ctxt, ty) {
fn token_stream_for_field(
ast_result: &ASTResult,
member: &syn::Member,
ty: &syn::Type,
is_option: bool,
) -> Option<TokenStream> {
let ident = get_member_ident(ast_result, member)?;
let ty_info = match parse_ty(ast_result, ty) {
Ok(ty_info) => ty_info,
Err(e) => {
eprintln!("token_stream_for_field: {:?} with error: {}", member, e);
@ -129,12 +134,12 @@ fn token_stream_for_field(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_
}?;
match ident_category(ty_info.ident) {
TypeCategory::Array => {
assert_bracket_ty_is_some(ctxt, &ty_info);
token_stream_for_vec(ctxt, member, &ty_info.bracket_ty_info.unwrap())
assert_bracket_ty_is_some(ast_result, &ty_info);
token_stream_for_vec(ast_result, member, &ty_info.bracket_ty_info.unwrap())
}
TypeCategory::Map => {
assert_bracket_ty_is_some(ctxt, &ty_info);
token_stream_for_map(ctxt, member, &ty_info.bracket_ty_info.unwrap())
assert_bracket_ty_is_some(ast_result, &ty_info);
token_stream_for_map(ast_result, member, &ty_info.bracket_ty_info.unwrap())
}
TypeCategory::Protobuf => {
// if the type wrapped by SingularPtrField, should call take first
@ -174,7 +179,7 @@ fn token_stream_for_field(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_
})
}
}
TypeCategory::Opt => token_stream_for_field(ctxt, member, ty_info.bracket_ty_info.unwrap().ty, true),
TypeCategory::Opt => token_stream_for_field(ast_result, member, ty_info.bracket_ty_info.unwrap().ty, true),
TypeCategory::Primitive | TypeCategory::Bytes => {
// eprintln!("😄 #{:?}", &field.name().unwrap());
if is_option {
@ -186,7 +191,7 @@ fn token_stream_for_field(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_
}
}
fn token_stream_for_vec(ctxt: &Ctxt, member: &syn::Member, bracketed_type: &TyInfo) -> Option<TokenStream> {
fn token_stream_for_vec(ctxt: &ASTResult, member: &syn::Member, bracketed_type: &TyInfo) -> Option<TokenStream> {
let ident = get_member_ident(ctxt, member)?;
match ident_category(bracketed_type.ident) {
@ -218,8 +223,8 @@ fn token_stream_for_vec(ctxt: &Ctxt, member: &syn::Member, bracketed_type: &TyIn
}
}
fn token_stream_for_map(ctxt: &Ctxt, member: &syn::Member, ty_info: &TyInfo) -> Option<TokenStream> {
let ident = get_member_ident(ctxt, member)?;
fn token_stream_for_map(ast_result: &ASTResult, member: &syn::Member, ty_info: &TyInfo) -> Option<TokenStream> {
let ident = get_member_ident(ast_result, member)?;
let take_ident = format_ident!("take_{}", ident.to_string());
let ty = ty_info.ty;
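Note: a hedged illustration of the per-field shape implied by the take_{} identifier built just above. For protobuf-backed fields the generated code is assumed to call the message's take_<field>() accessor (moving the value out of its SingularPtrField wrapper) and convert with Into; the exact statement emitted is not shown in this diff.

use proc_macro2::TokenStream;
use quote::{format_ident, quote};

// `pb` is the decoded protobuf message, `o` the domain struct being filled in.
fn deserialize_protobuf_field(ident: &syn::Ident) -> TokenStream {
    let take_ident = format_ident!("take_{}", ident.to_string());
    quote! { o.#ident = pb.#take_ident().into(); }
}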

View File

@ -2,7 +2,7 @@ use flowy_ast::*;
use proc_macro2::TokenStream;
#[allow(dead_code)]
pub fn make_enum_token_stream(_ctxt: &Ctxt, cont: &ASTContainer) -> Option<TokenStream> {
pub fn make_enum_token_stream(_ast_result: &ASTResult, cont: &ASTContainer) -> Option<TokenStream> {
let enum_ident = &cont.ident;
let pb_enum = cont.attrs.pb_enum_type()?;
let build_to_pb_enum = cont.data.all_idents().map(|i| {

View File

@ -11,40 +11,40 @@ use proc_macro2::TokenStream;
use std::default::Default;
pub fn expand_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
let ctxt = Ctxt::new();
let cont = match ASTContainer::from_ast(&ctxt, input) {
let ast_result = ASTResult::new();
let cont = match ASTContainer::from_ast(&ast_result, input) {
Some(cont) => cont,
None => return Err(ctxt.check().unwrap_err()),
None => return Err(ast_result.check().unwrap_err()),
};
let mut token_stream: TokenStream = TokenStream::default();
if let Some(de_token_stream) = make_de_token_steam(&ctxt, &cont) {
if let Some(de_token_stream) = make_de_token_steam(&ast_result, &cont) {
token_stream.extend(de_token_stream);
}
if let Some(se_token_stream) = make_se_token_stream(&ctxt, &cont) {
if let Some(se_token_stream) = make_se_token_stream(&ast_result, &cont) {
token_stream.extend(se_token_stream);
}
ctxt.check()?;
ast_result.check()?;
Ok(token_stream)
}
pub fn expand_enum_derive(input: &syn::DeriveInput) -> Result<TokenStream, Vec<syn::Error>> {
let ctxt = Ctxt::new();
let cont = match ASTContainer::from_ast(&ctxt, input) {
let ast_result = ASTResult::new();
let cont = match ASTContainer::from_ast(&ast_result, input) {
Some(cont) => cont,
None => return Err(ctxt.check().unwrap_err()),
None => return Err(ast_result.check().unwrap_err()),
};
let mut token_stream: TokenStream = TokenStream::default();
if let Some(enum_token_stream) = make_enum_token_stream(&ctxt, &cont) {
if let Some(enum_token_stream) = make_enum_token_stream(&ast_result, &cont) {
token_stream.extend(enum_token_stream);
}
ctxt.check()?;
ast_result.check()?;
Ok(token_stream)
}
// #[macro_use]
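Note: a hedged sketch of how a proc-macro entry point might consume expand_derive and surface the collected errors as compile_error!() diagnostics. The macro name ProtoBuf and the pb attribute registration are assumptions and are not part of this commit.

use proc_macro::TokenStream;

#[proc_macro_derive(ProtoBuf, attributes(pb))]
pub fn derive_proto_buf(input: TokenStream) -> TokenStream {
    let input = syn::parse_macro_input!(input as syn::DeriveInput);
    expand_derive(&input)
        // Each collected syn::Error becomes a compile_error!() invocation.
        .unwrap_or_else(|errors| errors.iter().map(syn::Error::to_compile_error).collect())
        .into()
}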

View File

@ -3,7 +3,7 @@ use crate::proto_buf::util::{get_member_ident, ident_category, TypeCategory};
use flowy_ast::*;
use proc_macro2::TokenStream;
pub fn make_se_token_stream(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStream> {
pub fn make_se_token_stream(ast_result: &ASTResult, ast: &ASTContainer) -> Option<TokenStream> {
let pb_ty = ast.attrs.pb_struct_type()?;
let struct_ident = &ast.ident;
@ -11,7 +11,7 @@ pub fn make_se_token_stream(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStre
.data
.all_fields()
.filter(|f| !f.attrs.skip_serializing())
.flat_map(|field| se_token_stream_for_field(ctxt, field, false));
.flat_map(|field| se_token_stream_for_field(ast_result, field, false));
let se_token_stream: TokenStream = quote! {
@ -37,21 +37,21 @@ pub fn make_se_token_stream(ctxt: &Ctxt, ast: &ASTContainer) -> Option<TokenStre
Some(se_token_stream)
}
fn se_token_stream_for_field(ctxt: &Ctxt, field: &ASTField, _take: bool) -> Option<TokenStream> {
fn se_token_stream_for_field(ast_result: &ASTResult, field: &ASTField, _take: bool) -> Option<TokenStream> {
if let Some(func) = &field.attrs.serialize_with() {
let member = &field.member;
Some(quote! { pb.#member=o.#func(); })
} else if field.attrs.is_one_of() {
token_stream_for_one_of(ctxt, field)
token_stream_for_one_of(ast_result, field)
} else {
gen_token_stream(ctxt, &field.member, field.ty, false)
gen_token_stream(ast_result, &field.member, field.ty, false)
}
}
fn token_stream_for_one_of(ctxt: &Ctxt, field: &ASTField) -> Option<TokenStream> {
fn token_stream_for_one_of(ast_result: &ASTResult, field: &ASTField) -> Option<TokenStream> {
let member = &field.member;
let ident = get_member_ident(ctxt, member)?;
let ty_info = match parse_ty(ctxt, field.ty) {
let ident = get_member_ident(ast_result, member)?;
let ty_info = match parse_ty(ast_result, field.ty) {
Ok(ty_info) => ty_info,
Err(e) => {
eprintln!("token_stream_for_one_of failed: {:?} with error: {}", member, e);
@ -85,8 +85,13 @@ fn token_stream_for_one_of(ctxt: &Ctxt, field: &ASTField) -> Option<TokenStream>
}
}
fn gen_token_stream(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_option: bool) -> Option<TokenStream> {
let ty_info = match parse_ty(ctxt, ty) {
fn gen_token_stream(
ast_result: &ASTResult,
member: &syn::Member,
ty: &syn::Type,
is_option: bool,
) -> Option<TokenStream> {
let ty_info = match parse_ty(ast_result, ty) {
Ok(ty_info) => ty_info,
Err(e) => {
eprintln!("gen_token_stream failed: {:?} with error: {}", member, e);
@ -94,8 +99,8 @@ fn gen_token_stream(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_option
}
}?;
match ident_category(ty_info.ident) {
TypeCategory::Array => token_stream_for_vec(ctxt, member, ty_info.bracket_ty_info.unwrap().ty),
TypeCategory::Map => token_stream_for_map(ctxt, member, ty_info.bracket_ty_info.unwrap().ty),
TypeCategory::Array => token_stream_for_vec(ast_result, member, ty_info.bracket_ty_info.unwrap().ty),
TypeCategory::Map => token_stream_for_map(ast_result, member, ty_info.bracket_ty_info.unwrap().ty),
TypeCategory::Str => {
if is_option {
Some(quote! {
@ -109,7 +114,7 @@ fn gen_token_stream(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_option
}
}
TypeCategory::Protobuf => Some(quote! { pb.#member = ::protobuf::SingularPtrField::some(o.#member.into()); }),
TypeCategory::Opt => gen_token_stream(ctxt, member, ty_info.bracket_ty_info.unwrap().ty, true),
TypeCategory::Opt => gen_token_stream(ast_result, member, ty_info.bracket_ty_info.unwrap().ty, true),
TypeCategory::Enum => {
// let pb_enum_ident = format_ident!("{}", ty_info.ident.to_string());
// Some(quote! {
@ -124,8 +129,8 @@ fn gen_token_stream(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type, is_option
}
// e.g. pub cells: Vec<CellData>, the member will be cells, ty would be Vec
fn token_stream_for_vec(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type) -> Option<TokenStream> {
let ty_info = match parse_ty(ctxt, ty) {
fn token_stream_for_vec(ast_result: &ASTResult, member: &syn::Member, ty: &syn::Type) -> Option<TokenStream> {
let ty_info = match parse_ty(ast_result, ty) {
Ok(ty_info) => ty_info,
Err(e) => {
eprintln!("token_stream_for_vec failed: {:?} with error: {}", member, e);
@ -150,9 +155,9 @@ fn token_stream_for_vec(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type) -> Op
}
// e.g. pub cells: HashMap<xx, xx>
fn token_stream_for_map(ctxt: &Ctxt, member: &syn::Member, ty: &syn::Type) -> Option<TokenStream> {
fn token_stream_for_map(ast_result: &ASTResult, member: &syn::Member, ty: &syn::Type) -> Option<TokenStream> {
// The key of the hashmap must be string
let ty_info = match parse_ty(ctxt, ty) {
let ty_info = match parse_ty(ast_result, ty) {
Ok(ty_info) => ty_info,
Err(e) => {
eprintln!("token_stream_for_map failed: {:?} with error: {}", member, e);
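Note: a hedged illustration of the quote!-based generation used throughout this file: each field contributes one assignment into the protobuf struct pb, read from the domain value o. The .clone().into() conversion below is illustrative only; the real conversion per field is chosen by ident_category above (strings, maps, vectors, nested protobuf messages and enums each get their own shape).

use proc_macro2::TokenStream;
use quote::quote;

fn assign_field(member: &syn::Member) -> TokenStream {
    // One serialization statement per field, mirroring the `pb.#member = ...`
    // shapes produced above.
    quote! { pb.#member = o.#member.clone().into(); }
}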

View File

@ -1,5 +1,5 @@
use dashmap::{DashMap, DashSet};
use flowy_ast::{Ctxt, TyInfo};
use flowy_ast::{ASTResult, TyInfo};
use flowy_codegen::ProtoCache;
use lazy_static::lazy_static;
use std::fs::File;
@ -12,18 +12,18 @@ pub fn ident_category(ident: &syn::Ident) -> TypeCategory {
category_from_str(ident_str)
}
pub(crate) fn get_member_ident<'a>(ctxt: &Ctxt, member: &'a syn::Member) -> Option<&'a syn::Ident> {
pub(crate) fn get_member_ident<'a>(ast_result: &ASTResult, member: &'a syn::Member) -> Option<&'a syn::Ident> {
if let syn::Member::Named(ref ident) = member {
Some(ident)
} else {
ctxt.error_spanned_by(member, "Unsupported member, shouldn't be self.0".to_string());
ast_result.error_spanned_by(member, "Unsupported member, shouldn't be self.0".to_string());
None
}
}
pub fn assert_bracket_ty_is_some(ctxt: &Ctxt, ty_info: &TyInfo) {
pub fn assert_bracket_ty_is_some(ast_result: &ASTResult, ty_info: &TyInfo) {
if ty_info.bracket_ty_info.is_none() {
ctxt.error_spanned_by(ty_info.ty, "Invalid bracketed type when gen de token steam".to_string());
ast_result.error_spanned_by(ty_info.ty, "Invalid bracketed type when gen de token steam".to_string());
}
}

View File

@ -11,7 +11,7 @@ protobuf = {version = "2.18.0"}
derive_more = {version = "0.99", features = ["display"]}
[build-dependencies]
flowy-codegen= { path = "../flowy-codegen"}
flowy-codegen= { path = "../flowy-codegen", features = ["proto_gen"]}
[features]

View File

@ -13,6 +13,6 @@ protobuf = {version = "2.18.0"}
md5 = "0.7.0"
[build-dependencies]
flowy-codegen= { path = "../flowy-codegen"}
flowy-codegen= { path = "../flowy-codegen", features = ["proto_gen"]}