[flutter]: config log & rename some funcs

appflowy 2021-11-04 12:47:41 +08:00
parent 955bc3e103
commit ad2a5179a8
17 changed files with 75 additions and 68 deletions

View File

@@ -20,7 +20,7 @@ class DocBloc extends Bloc<DocEvent, DocState> {
   final IViewListener listener;
   final ITrash trasnManager;
   late Document document;
-  late StreamSubscription? _subscription;
+  StreamSubscription? _subscription;
   DocBloc({
     required this.view,

View File

@@ -87,6 +87,7 @@ impl Revision {
 impl std::fmt::Debug for Revision {
     fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
         let _ = f.write_fmt(format_args!("doc_id {}, ", self.doc_id))?;
+        let _ = f.write_fmt(format_args!("base_rev_id {}, ", self.base_rev_id))?;
         let _ = f.write_fmt(format_args!("rev_id {}, ", self.rev_id))?;
         match Delta::from_bytes(&self.delta_data) {
             Ok(delta) => {

View File

@@ -65,7 +65,7 @@ impl FlowyDocument {
     pub async fn apply_doc_delta(&self, params: DocDelta) -> Result<DocDelta, DocError> {
         // workaround: compare the rust's delta with flutter's delta. Will be removed
         // very soon
-        let doc = self.doc_ctrl.edit_doc(params.clone()).await?;
+        let doc = self.doc_ctrl.apply_local_delta(params.clone()).await?;
         Ok(doc)
     }
 }

View File

@@ -45,7 +45,6 @@ impl DocController {
         Ok(())
     }
-    #[tracing::instrument(level = "debug", skip(self, pool), err)]
     pub(crate) async fn open(
         &self,
         params: DocIdentifier,
@@ -78,10 +77,10 @@ impl DocController {
     // as None e.g.
     // json : {"retain":7,"attributes":{"bold":null}}
     // deserialize delta: [ {retain: 7, attributes: {Bold: AttributeValue(None)}} ]
-    #[tracing::instrument(level = "debug", skip(self, delta), err)]
-    pub(crate) async fn edit_doc(&self, delta: DocDelta) -> Result<DocDelta, DocError> {
+    #[tracing::instrument(level = "debug", skip(self, delta), fields(doc_id = %delta.doc_id), err)]
+    pub(crate) async fn apply_local_delta(&self, delta: DocDelta) -> Result<DocDelta, DocError> {
         let edit_doc_ctx = self.cache.get(&delta.doc_id)?;
-        let _ = edit_doc_ctx.compose_local_delta(Bytes::from(delta.data)).await?;
+        let _ = edit_doc_ctx.composing_local_delta(Bytes::from(delta.data)).await?;
         Ok(edit_doc_ctx.delta().await?)
     }
 }

View File

@@ -51,7 +51,7 @@ impl DocumentActor {
     async fn handle_message(&self, msg: DocumentMsg) -> DocResult<()> {
         match msg {
             DocumentMsg::Delta { delta, ret } => {
-                let result = self.compose_delta(delta).await;
+                let result = self.composed_delta(delta).await;
                 let _ = ret.send(result);
             },
             DocumentMsg::RemoteRevision { bytes, ret } => {
@@ -112,11 +112,15 @@ impl DocumentActor {
         Ok(())
     }
-    async fn compose_delta(&self, delta: Delta) -> DocResult<()> {
+    #[tracing::instrument(level = "debug", skip(self, delta), fields(compose_result), err)]
+    async fn composed_delta(&self, delta: Delta) -> DocResult<()> {
         // tracing::debug!("{:?} thread handle_message", thread::current(),);
         let mut document = self.document.write().await;
         let result = document.compose_delta(&delta);
-        tracing::debug!("doc_id:{} - Compose push delta: {}", &self.doc_id, delta.to_json(),);
+        tracing::Span::current().record(
+            "compose_result",
+            &format!("doc_id:{} - {}", &self.doc_id, delta.to_json()).as_str(),
+        );
         drop(document);
         result
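Note: the hunk above replaces an inline tracing::debug! call with a span field: `compose_result` is declared empty in the #[tracing::instrument] attribute and filled in later via tracing::Span::current().record(). A minimal, self-contained sketch of that idiom (it assumes the tracing and tracing-subscriber crates; the function and value here are illustrative, not part of this commit):

use tracing::instrument;

// Declare `compose_result` as an empty field on the span, then fill it
// once the value is known inside the function body.
#[instrument(level = "debug", skip(delta), fields(compose_result))]
fn compose(delta: &str) {
    let outcome = format!("composed {} bytes", delta.len());
    tracing::Span::current().record("compose_result", &outcome.as_str());
}

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::DEBUG).init();
    compose(r#"{"retain":7}"#);
}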

View File

@@ -72,7 +72,7 @@ impl ClientEditDoc {
         };
         let _ = self.document.send(msg);
         let delta = rx.await.map_err(internal_error)??;
-        let rev_id = self.save_revision(delta).await?;
+        let rev_id = self.save_local_delta(delta).await?;
         save_document(self.document.clone(), rev_id.into()).await
     }
@@ -81,7 +81,7 @@ impl ClientEditDoc {
         let msg = DocumentMsg::Delete { interval, ret };
         let _ = self.document.send(msg);
         let delta = rx.await.map_err(internal_error)??;
-        let _ = self.save_revision(delta).await?;
+        let _ = self.save_local_delta(delta).await?;
         Ok(())
     }
@@ -94,7 +94,7 @@ impl ClientEditDoc {
         };
         let _ = self.document.send(msg);
         let delta = rx.await.map_err(internal_error)??;
-        let _ = self.save_revision(delta).await?;
+        let _ = self.save_local_delta(delta).await?;
         Ok(())
     }
@@ -107,7 +107,7 @@ impl ClientEditDoc {
         };
         let _ = self.document.send(msg);
         let delta = rx.await.map_err(internal_error)??;
-        let _ = self.save_revision(delta).await?;
+        let _ = self.save_local_delta(delta).await?;
         Ok(())
     }
@@ -151,13 +151,9 @@ impl ClientEditDoc {
         })
     }
-    #[tracing::instrument(level = "debug", skip(self, delta), fields(revision_delta = %delta.to_json(), send_state, base_rev_id, rev_id))]
-    async fn save_revision(&self, delta: Delta) -> Result<RevId, DocError> {
+    async fn save_local_delta(&self, delta: Delta) -> Result<RevId, DocError> {
         let delta_data = delta.to_bytes();
         let (base_rev_id, rev_id) = self.rev_manager.next_rev_id();
-        tracing::Span::current().record("base_rev_id", &base_rev_id);
-        tracing::Span::current().record("rev_id", &rev_id);
         let delta_data = delta_data.to_vec();
         let revision = Revision::new(base_rev_id, rev_id, delta_data, &self.doc_id, RevType::Local);
         let _ = self.rev_manager.add_revision(&revision).await?;
@@ -165,7 +161,7 @@ impl ClientEditDoc {
     }
     #[tracing::instrument(level = "debug", skip(self, data), err)]
-    pub(crate) async fn compose_local_delta(&self, data: Bytes) -> Result<(), DocError> {
+    pub(crate) async fn composing_local_delta(&self, data: Bytes) -> Result<(), DocError> {
         let delta = Delta::from_bytes(&data)?;
         let (ret, rx) = oneshot::channel::<DocResult<()>>();
         let msg = DocumentMsg::Delta {
@@ -175,7 +171,7 @@ impl ClientEditDoc {
         };
         let _ = self.document.send(msg);
         let _ = rx.await.map_err(internal_error)??;
-        let rev_id = self.save_revision(delta).await?;
+        let rev_id = self.save_local_delta(delta).await?;
         save_document(self.document.clone(), rev_id).await
     }
@@ -271,7 +267,7 @@ impl ClientEditDoc {
             WsDataType::NewDocUser => {},
             WsDataType::Acked => {
                 let rev_id = RevId::try_from(bytes)?;
-                let _ = self.rev_manager.ack_rev(rev_id).await?;
+                let _ = self.rev_manager.ack_revision(rev_id).await?;
             },
             WsDataType::Conflict => {},
         }
@@ -304,7 +300,7 @@ fn spawn_rev_receiver(mut receiver: mpsc::UnboundedReceiver<Revision>, ws: Arc<d
     tokio::spawn(async move {
         loop {
             while let Some(revision) = receiver.recv().await {
-                tracing::debug!("Send revision:{} to server", revision.rev_id);
+                // tracing::debug!("Send revision:{} to server", revision.rev_id);
                 match ws.send(revision.into()) {
                     Ok(_) => {},
                     Err(e) => log::error!("Send revision failed: {:?}", e),
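The send/await pairs above all follow the actor pattern used throughout this file: each request carries a oneshot sender, the DocumentActor answers through it, and the caller awaits the receiver. A rough, self-contained sketch of that shape (tokio assumed; the message and types are illustrative stand-ins, not the crate's actual ones):

use tokio::sync::{mpsc, oneshot};

// A request message that carries its own reply channel.
enum Msg {
    Compose { data: String, ret: oneshot::Sender<Result<String, String>> },
}

// The actor owns the receiver and answers each request through `ret`.
async fn actor(mut rx: mpsc::UnboundedReceiver<Msg>) {
    while let Some(Msg::Compose { data, ret }) = rx.recv().await {
        let _ = ret.send(Ok(format!("composed: {}", data)));
    }
}

#[tokio::main]
async fn main() {
    let (tx, rx) = mpsc::unbounded_channel();
    tokio::spawn(actor(rx));
    let (ret, reply) = oneshot::channel();
    let _ = tx.send(Msg::Compose { data: "delta".into(), ret });
    println!("{:?}", reply.await.unwrap());
}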

View File

@@ -42,12 +42,12 @@ impl RevisionManager {
     }
     pub async fn add_revision(&self, revision: &Revision) -> Result<(), DocError> {
-        let _ = self.rev_store.handle_new_revision(revision.clone()).await?;
+        let _ = self.rev_store.add_revision(revision.clone()).await?;
         Ok(())
     }
-    pub async fn ack_rev(&self, rev_id: RevId) -> Result<(), DocError> {
-        self.rev_store.handle_revision_acked(rev_id).await;
+    pub async fn ack_revision(&self, rev_id: RevId) -> Result<(), DocError> {
+        self.rev_store.ack_revision(rev_id).await;
         Ok(())
     }

View File

@@ -55,7 +55,7 @@ impl RevisionStore {
     }
     #[tracing::instrument(level = "debug", skip(self, revision))]
-    pub async fn handle_new_revision(&self, revision: Revision) -> DocResult<()> {
+    pub async fn add_revision(&self, revision: Revision) -> DocResult<()> {
         if self.revs_map.contains_key(&revision.rev_id) {
             return Err(DocError::duplicate_rev().context(format!("Duplicate revision id: {}", revision.rev_id)));
         }
@@ -82,8 +82,8 @@ impl RevisionStore {
         Ok(())
     }
-    #[tracing::instrument(level = "debug", skip(self))]
-    pub async fn handle_revision_acked(&self, rev_id: RevId) {
+    #[tracing::instrument(level = "debug", skip(self, rev_id), fields(rev_id = %rev_id.as_ref()))]
+    pub async fn ack_revision(&self, rev_id: RevId) {
         let rev_id = rev_id.value;
         self.pending_revs
             .write()
@@ -113,8 +113,14 @@ impl RevisionStore {
             .map(|kv| (kv.revision.clone(), kv.state))
             .collect::<Vec<(Revision, RevState)>>();
-        match persistence.create_revs(revisions_state) {
-            Ok(_) => revs_map.retain(|k, _| !ids.contains(k)),
+        match persistence.create_revs(revisions_state.clone()) {
+            Ok(_) => {
+                tracing::debug!(
+                    "Revision State Changed: {:?}",
+                    revisions_state.iter().map(|s| (s.0.rev_id, s.1)).collect::<Vec<_>>()
+                );
+                revs_map.retain(|k, _| !ids.contains(k));
+            },
             Err(e) => log::error!("Save revision failed: {:?}", e),
         }
     }));
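The new attribute on ack_revision shows the other tracing idiom this commit leans on: the argument is skipped from automatic capture, then re-exposed as a Display-formatted (%) span field. A small sketch of that idiom, under the same assumption that the tracing and tracing-subscriber crates are available (names are illustrative):

use tracing::instrument;

// `rev_id` is skipped from automatic capture but recorded as a
// Display-formatted field on the span.
#[instrument(level = "debug", skip(rev_id), fields(rev_id = %rev_id))]
fn ack_revision(rev_id: i64) {
    tracing::debug!("revision acked");
}

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::DEBUG).init();
    ack_revision(42);
}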

View File

@@ -23,7 +23,6 @@ impl RevTableSql {
         let records = revisions
             .into_iter()
             .map(|(revision, new_state)| {
-                tracing::debug!("Set {} to {:?}", revision.rev_id, new_state);
                 let rev_ty: RevTableType = revision.ty.into();
                 (
                     doc_id.eq(revision.doc_id),

View File

@@ -19,15 +19,18 @@ lazy_static! {
 pub struct Builder {
     name: String,
     env_filter: String,
-    file_appender: Option<RollingFileAppender>,
+    file_appender: RollingFileAppender,
 }
 impl Builder {
-    pub fn new(name: &str) -> Self {
+    pub fn new(name: &str, directory: &str) -> Self {
+        // let directory = directory.as_ref().to_str().unwrap().to_owned();
+        let local_file_name = format!("{}.log", name);
         Builder {
             name: name.to_owned(),
             env_filter: "Info".to_owned(),
-            file_appender: None,
+            file_appender: tracing_appender::rolling::daily(directory, local_file_name),
         }
     }
@@ -36,18 +39,10 @@ impl Builder {
         self
     }
-    pub fn local(mut self, directory: impl AsRef<Path>) -> Self {
-        let directory = directory.as_ref().to_str().unwrap().to_owned();
-        let local_file_name = format!("{}.log", &self.name);
-        self.file_appender = Some(tracing_appender::rolling::daily(directory, local_file_name));
-        self
-    }
     pub fn build(self) -> std::result::Result<(), String> {
         let env_filter = EnvFilter::new(self.env_filter);
-        let file_appender = self.file_appender.unwrap();
-        let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);
+        let (non_blocking, guard) = tracing_appender::non_blocking(self.file_appender);
         let subscriber = tracing_subscriber::fmt()
             // .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
             .with_ansi(false)
@@ -61,7 +56,8 @@ impl Builder {
             .with_span_list(true)
             .compact()
             .finish()
-            .with(env_filter).with(JsonStorageLayer)
+            .with(env_filter)
+            .with(JsonStorageLayer)
             .with(FlowyFormattingLayer::new(std::io::stdout))
             .with(FlowyFormattingLayer::new(non_blocking));
@@ -98,7 +94,7 @@ mod tests {
     // run cargo test --features="use_bunyan" or cargo test
     #[test]
     fn test_log() {
-        let _ = Builder::new("flowy").local(".").env_filter("debug").build().unwrap();
+        let _ = Builder::new("flowy", ".").env_filter("debug").build().unwrap();
         tracing::info!("😁 tracing::info call");
         log::debug!("😁 log::debug call");
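With local() removed, Builder::new now creates the daily rolling file appender up front. A rough sketch of the wiring build() ends up doing, reduced to the essentials (assumes the tracing-appender and tracing-subscriber crates; the path and file name are placeholders):

fn main() {
    // Daily-rotated file: ./flowy.log.<YYYY-MM-DD>
    let file_appender = tracing_appender::rolling::daily(".", "flowy.log");
    // Keep the guard alive, otherwise buffered lines are dropped on exit.
    let (non_blocking, _guard) = tracing_appender::non_blocking(file_appender);
    tracing_subscriber::fmt()
        .with_ansi(false)
        .with_writer(non_blocking)
        .init();
    tracing::info!("written to the rolling log file");
}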

View File

@@ -101,8 +101,7 @@ fn init_log(config: &FlowySDKConfig) {
     if !INIT_LOG.load(Ordering::SeqCst) {
         INIT_LOG.store(true, Ordering::SeqCst);
-        let _ = flowy_log::Builder::new("flowy-client")
-            .local(&config.root)
+        let _ = flowy_log::Builder::new("flowy-client", &config.root)
             .env_filter(&config.log_filter)
             .build();
     }

View File

@@ -39,7 +39,7 @@ impl std::fmt::Display for TrashIdentifiers {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         f.write_str(&format!(
             "{:?}",
-            &self.items.iter().map(|item| &item.id).collect::<Vec<_>>()
+            &self.items.iter().map(|item| format!("{}", item)).collect::<Vec<_>>()
         ))
     }
 }
@@ -94,6 +94,10 @@ impl std::convert::From<&Trash> for TrashIdentifier {
     }
 }
+impl std::fmt::Display for TrashIdentifier {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_str(&format!("{:?}:{}", self.ty, self.id)) }
+}
 #[derive(PartialEq, ProtoBuf, Default, Debug, Clone)]
 pub struct Trash {
     #[pb(index = 1)]
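With the new Display impl, TrashIdentifiers now renders each item as "<type>:<id>" rather than just its id. A standalone sketch of the effect (the types here are stand-ins for illustration, not the crate's ProtoBuf structs):

use std::fmt;

#[derive(Debug)]
enum TrashType { View }

struct TrashIdentifier { ty: TrashType, id: String }

impl fmt::Display for TrashIdentifier {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&format!("{:?}:{}", self.ty, self.id))
    }
}

fn main() {
    let item = TrashIdentifier { ty: TrashType::View, id: "abc".into() };
    println!("{:?}", vec![format!("{}", item)]); // ["View:abc"]
}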

View File

@@ -17,7 +17,6 @@ use crate::{
 use flowy_dispatch::prelude::{data_result, Data, DataResult, Unit};
 use std::{convert::TryInto, sync::Arc};
-#[tracing::instrument(skip(data, controller), err)]
 pub(crate) async fn create_app_handler(
     data: Data<CreateAppRequest>,
     controller: Unit<Arc<AppController>>,
@@ -28,7 +27,6 @@ pub(crate) async fn create_app_handler(
     data_result(detail)
 }
-#[tracing::instrument(skip(data, controller, trash_can))]
 pub(crate) async fn delete_app_handler(
     data: Data<QueryAppRequest>,
     controller: Unit<Arc<AppController>>,

View File

@@ -19,7 +19,6 @@ use flowy_dispatch::prelude::{data_result, Data, DataResult, Unit};
 use flowy_document::entities::doc::DocDelta;
 use std::{convert::TryInto, sync::Arc};
-#[tracing::instrument(skip(data, controller), err)]
 pub(crate) async fn create_view_handler(
     data: Data<CreateViewRequest>,
     controller: Unit<Arc<ViewController>>,
@@ -29,7 +28,6 @@ pub(crate) async fn create_view_handler(
     data_result(view)
 }
-#[tracing::instrument(skip(data, controller), err)]
 pub(crate) async fn read_view_handler(
     data: Data<QueryViewRequest>,
     controller: Unit<Arc<ViewController>>,
@@ -52,7 +50,6 @@ pub(crate) async fn update_view_handler(
     Ok(())
 }
-#[tracing::instrument(skip(data, controller), err)]
 pub(crate) async fn apply_doc_delta_handler(
     data: Data<DocDelta>,
     controller: Unit<Arc<ViewController>>,
@@ -62,7 +59,6 @@ pub(crate) async fn apply_doc_delta_handler(
     data_result(doc)
 }
-#[tracing::instrument(skip(data, controller, trash_can), err)]
 pub(crate) async fn delete_view_handler(
     data: Data<QueryViewRequest>,
     controller: Unit<Arc<ViewController>>,
@@ -83,7 +79,6 @@ pub(crate) async fn delete_view_handler(
     Ok(())
 }
-#[tracing::instrument(skip(data, controller), err)]
 pub(crate) async fn open_view_handler(
     data: Data<QueryViewRequest>,
     controller: Unit<Arc<ViewController>>,
@@ -93,7 +88,6 @@ pub(crate) async fn open_view_handler(
     data_result(doc)
 }
-#[tracing::instrument(skip(data, controller), err)]
 pub(crate) async fn close_view_handler(
     data: Data<QueryViewRequest>,
     controller: Unit<Arc<ViewController>>,

View File

@@ -40,7 +40,7 @@ impl AppController {
         Ok(())
     }
-    #[tracing::instrument(level = "debug", skip(self), err)]
+    #[tracing::instrument(level = "debug", skip(self, params), fields(name = %params.name) err)]
     pub(crate) async fn create_app(&self, params: CreateAppParams) -> Result<App, WorkspaceError> {
         let app = self.create_app_on_server(params).await?;
         let conn = &*self.database.db_connection()?;
@@ -179,7 +179,7 @@ impl AppController {
     }
 }
-#[tracing::instrument(level = "debug", skip(database, trash_can))]
+#[tracing::instrument(level = "trace", skip(database, trash_can))]
 async fn handle_trash_event(database: Arc<dyn WorkspaceDatabase>, trash_can: Arc<TrashCan>, event: TrashEvent) {
     let db_result = database.db_connection();
     match event {

View File

@@ -137,7 +137,7 @@ impl TrashCan {
     // DELETE operations. Its not possible for us to use these commands to
     // CREATE and DROP tables operations because those are auto-commit in the
     // database.
-    #[tracing::instrument(level = "debug", skip(self, trash), fields(trash_count), err)]
+    #[tracing::instrument(name = "add_trash", level = "debug", skip(self, trash), fields(trash_ids), err)]
     pub async fn add<T: Into<Trash>>(&self, trash: Vec<T>) -> Result<(), WorkspaceError> {
         let (tx, mut rx) = mpsc::channel::<WorkspaceResult<()>>(1);
         let repeated_trash = trash.into_iter().map(|t| t.into()).collect::<Vec<Trash>>();
@@ -146,7 +146,17 @@ impl TrashCan {
             .map(|t| t.into())
             .collect::<Vec<TrashIdentifier>>();
-        tracing::Span::current().record("trash_count", &identifiers.len());
+        tracing::Span::current().record(
+            "trash_ids",
+            &format!(
+                "{:?}",
+                identifiers
+                    .iter()
+                    .map(|identifier| format!("{:?}:{}", identifier.ty, identifier.id))
+                    .collect::<Vec<_>>()
+            )
+            .as_str(),
+        );
         let _ = thread::scope(|_s| {
             let conn = self.database.db_connection()?;
             conn.immediate_transaction::<_, WorkspaceError, _>(|| {
@@ -257,9 +267,9 @@ impl TrashCan {
     }
 }
-#[tracing::instrument(skip(repeated_trash), fields(trash_count))]
+#[tracing::instrument(skip(repeated_trash), fields(n_trash))]
 fn notify_trash_changed(repeated_trash: RepeatedTrash) {
-    tracing::Span::current().record("trash_count", &repeated_trash.len());
+    tracing::Span::current().record("n_trash", &repeated_trash.len());
     send_anonymous_dart_notification(WorkspaceNotification::TrashUpdated)
         .payload(repeated_trash)
         .send();

View File

@@ -57,7 +57,7 @@ impl ViewController {
         Ok(())
     }
-    #[tracing::instrument(level = "debug", skip(self, params), err)]
+    #[tracing::instrument(level = "debug", skip(self, params), fields(name = %params.name), err)]
     pub(crate) async fn create_view(&self, params: CreateViewParams) -> Result<View, WorkspaceError> {
         let view = self.create_view_on_server(params.clone()).await?;
         let conn = &*self.database.db_connection()?;
@@ -79,6 +79,7 @@ impl ViewController {
         Ok(())
     }
+    #[tracing::instrument(skip(self, params), fields(view_id = %params.view_id), err)]
     pub(crate) async fn read_view(&self, params: ViewIdentifier) -> Result<View, WorkspaceError> {
         let conn = self.database.db_connection()?;
         let view_table = ViewTableSql::read_view(&params.view_id, &*conn)?;
@@ -106,19 +107,19 @@ impl ViewController {
         Ok(view_tables)
     }
-    #[tracing::instrument(level = "debug", skip(self), err)]
+    #[tracing::instrument(level = "debug", skip(self, params), fields(doc_id = %params.doc_id), err)]
     pub(crate) async fn open_view(&self, params: DocIdentifier) -> Result<DocDelta, WorkspaceError> {
         let edit_context = self.document.open(params, self.database.db_pool()?).await?;
         Ok(edit_context.delta().await.map_err(internal_error)?)
     }
-    #[tracing::instrument(level = "debug", skip(self), err)]
+    #[tracing::instrument(level = "debug", skip(self,params), fields(doc_id = %params.doc_id), err)]
     pub(crate) async fn close_view(&self, params: DocIdentifier) -> Result<(), WorkspaceError> {
         let _ = self.document.close(params).await?;
         Ok(())
     }
-    #[tracing::instrument(level = "debug", skip(self), err)]
+    #[tracing::instrument(level = "debug", skip(self, params), fields(doc_id = %params.doc_id), err)]
     pub(crate) async fn duplicate_view(&self, params: DocIdentifier) -> Result<(), WorkspaceError> {
         let view: View = ViewTableSql::read_view(&params.doc_id, &*self.database.db_connection()?)?.into();
         let delta_data = self
@@ -254,7 +255,7 @@ impl ViewController {
     }
 }
-#[tracing::instrument(level = "debug", skip(database, document, trash_can))]
+#[tracing::instrument(level = "trace", skip(database, document, trash_can))]
 async fn handle_trash_event(
     database: Arc<dyn WorkspaceDatabase>,
     document: Arc<FlowyDocument>,