add backend kv store

appflowy 2021-12-21 21:06:16 +08:00
parent da4f7f40e7
commit eb601932ea
24 changed files with 298 additions and 54 deletions

View File

@@ -5,14 +5,7 @@
 1. follow the [instructions](https://docs.docker.com/desktop/mac/install/) to install docker.
 2. open terminal and run: `docker pull postgres`
-3. run `make init_database`. It will create the database schema on the remote specified by DATABASE_URL. You can connect to your database using
+3. run `make init_postgres` if you have not run it before. You can find the running container by running `docker ps`
+```
+CONTAINER ID   IMAGE      COMMAND                  CREATED          STATUS          PORTS                                       NAMES
+bfcdd6369e89   postgres   "docker-entrypoint.s…"   19 minutes ago   Up 19 minutes   0.0.0.0:5432->5432/tcp, :::5432->5432/tcp   brave_bassi
+```
+4. run `make init_database`. It will create the database schema on the remote specified by DATABASE_URL. You can connect to your database using
 pgAdmin.
 ![img_2.png](img_2.png)

View File

@@ -0,0 +1,6 @@
-- Add migration script here
CREATE TABLE IF NOT EXISTS kv_table(
    id TEXT NOT NULL,
    PRIMARY KEY (id),
    blob bytea
);
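For orientation (not part of the commit): the table is just a TEXT primary key plus a bytea payload, so a raw sqlx round-trip against it looks like the sketch below. The helper name and the key/value literals are invented for the example, and it assumes an already-connected `PgPool`.

```rust
use sqlx::PgPool;

// Hypothetical helper, for illustration only: write one row into kv_table
// and read the blob back with plain sqlx queries.
async fn kv_roundtrip(pool: &PgPool) -> Result<Vec<u8>, sqlx::Error> {
    sqlx::query("INSERT INTO kv_table (id, blob) VALUES ($1, $2)")
        .bind("example-key")
        .bind(b"example-value".as_ref()) // &[u8] encodes as bytea
        .execute(pool)
        .await?;

    let row: (Vec<u8>,) = sqlx::query_as("SELECT blob FROM kv_table WHERE id = $1")
        .bind("example-key")
        .fetch_one(pool)
        .await?;
    Ok(row.0)
}
```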

View File

@@ -1,5 +1,6 @@
 use crate::services::{
     document::manager::DocumentManager,
+    kv_store::{KVStore, PostgresKV},
     web_socket::{WSServer, WebSocketReceivers},
 };
 use actix::Addr;
@@ -14,6 +15,7 @@ pub struct AppContext {
     pub pg_pool: Data<PgPool>,
     pub ws_receivers: Data<WebSocketReceivers>,
     pub document_mng: Data<Arc<DocumentManager>>,
+    pub kv_store: Data<Arc<dyn KVStore>>,
 }

 impl AppContext {
@@ -24,12 +26,16 @@ impl AppContext {
         let mut ws_receivers = WebSocketReceivers::new();
         let document_mng = Arc::new(DocumentManager::new(pg_pool.clone()));
         ws_receivers.set(WSModule::Doc, document_mng.clone());
+        let kv_store = Arc::new(PostgresKV {
+            pg_pool: pg_pool.clone(),
+        });

         AppContext {
             ws_server,
             pg_pool,
             ws_receivers: Data::new(ws_receivers),
             document_mng: Data::new(document_mng),
+            kv_store: Data::new(kv_store),
         }
     }
 }
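Since `kv_store` is registered as `Data`, an actix-web handler can extract it like the rest of the shared state. A minimal sketch, not in this commit; the handler name, route wiring, and key are hypothetical:

```rust
use actix_web::{web::Data, HttpResponse};
use std::sync::Arc;

use crate::services::kv_store::KVStore; // path as used inside the backend crate

// Hypothetical handler: extract the shared KVStore trait object and look up a key.
async fn read_value(kv_store: Data<Arc<dyn KVStore>>) -> HttpResponse {
    match kv_store.get("some-key").await {
        Ok(Some(bytes)) => HttpResponse::Ok().body(bytes.to_vec()),
        Ok(None) => HttpResponse::NotFound().finish(),
        Err(_) => HttpResponse::InternalServerError().finish(),
    }
}
```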

View File

@@ -80,14 +80,14 @@ impl NewAppSqlBuilder {
         let app: App = self.table.clone().into();

         let (sql, args) = SqlBuilder::create(APP_TABLE)
-            .add_arg("id", self.table.id)
-            .add_arg("workspace_id", self.table.workspace_id)
-            .add_arg("name", self.table.name)
-            .add_arg("description", self.table.description)
-            .add_arg("color_style", self.table.color_style)
-            .add_arg("modified_time", self.table.modified_time)
-            .add_arg("create_time", self.table.create_time)
-            .add_arg("user_id", self.table.user_id)
+            .add_field_with_arg("id", self.table.id)
+            .add_field_with_arg("workspace_id", self.table.workspace_id)
+            .add_field_with_arg("name", self.table.name)
+            .add_field_with_arg("description", self.table.description)
+            .add_field_with_arg("color_style", self.table.color_style)
+            .add_field_with_arg("modified_time", self.table.modified_time)
+            .add_field_with_arg("create_time", self.table.create_time)
+            .add_field_with_arg("user_id", self.table.user_id)
             .build()?;

         Ok((sql, args, app))

View File

@@ -21,9 +21,9 @@ pub(crate) async fn create_trash(
 ) -> Result<(), ServerError> {
     for (trash_id, ty) in records {
         let (sql, args) = SqlBuilder::create(TRASH_TABLE)
-            .add_arg("id", trash_id)
-            .add_arg("user_id", &user.user_id)
-            .add_arg("ty", ty)
+            .add_field_with_arg("id", trash_id)
+            .add_field_with_arg("user_id", &user.user_id)
+            .add_field_with_arg("ty", ty)
             .build()?;

         let _ = sqlx::query_with(&sql, args)
@@ -52,7 +52,7 @@ pub(crate) async fn delete_all_trash(
         .collect::<Vec<(Uuid, i32)>>();
     tracing::Span::current().record("delete_rows", &format!("{:?}", rows).as_str());
     let affected_row_count = rows.len();
-    let _ = delete_trash_targets(transaction as &mut DBTransaction<'_>, rows).await?;
+    let _ = delete_trash_associate_targets(transaction as &mut DBTransaction<'_>, rows).await?;

     let (sql, args) = SqlBuilder::delete(TRASH_TABLE)
         .and_where_eq("user_id", &user.user_id)
@@ -84,7 +84,7 @@ pub(crate) async fn delete_trash(
             .await
             .map_err(map_sqlx_error)?;

-        let _ = delete_trash_targets(
+        let _ = delete_trash_associate_targets(
             transaction as &mut DBTransaction<'_>,
             vec![(trash_table.id, trash_table.ty)],
         )
@@ -101,7 +101,7 @@ pub(crate) async fn delete_trash(
 }

 #[tracing::instrument(skip(transaction, targets), err)]
-async fn delete_trash_targets(
+async fn delete_trash_associate_targets(
     transaction: &mut DBTransaction<'_>,
     targets: Vec<(Uuid, i32)>,
 ) -> Result<(), ServerError> {

View File

@@ -77,14 +77,14 @@ impl NewViewSqlBuilder {
         let view: View = self.table.clone().into();

         let (sql, args) = SqlBuilder::create(VIEW_TABLE)
-            .add_arg("id", self.table.id)
-            .add_arg("belong_to_id", self.table.belong_to_id)
-            .add_arg("name", self.table.name)
-            .add_arg("description", self.table.description)
-            .add_arg("modified_time", self.table.modified_time)
-            .add_arg("create_time", self.table.create_time)
-            .add_arg("thumbnail", self.table.thumbnail)
-            .add_arg("view_type", self.table.view_type)
+            .add_field_with_arg("id", self.table.id)
+            .add_field_with_arg("belong_to_id", self.table.belong_to_id)
+            .add_field_with_arg("name", self.table.name)
+            .add_field_with_arg("description", self.table.description)
+            .add_field_with_arg("modified_time", self.table.modified_time)
+            .add_field_with_arg("create_time", self.table.create_time)
+            .add_field_with_arg("thumbnail", self.table.thumbnail)
+            .add_field_with_arg("view_type", self.table.view_type)
             .build()?;

         Ok((sql, args, view))

View File

@@ -56,12 +56,12 @@ impl NewWorkspaceBuilder {
         let workspace: Workspace = self.table.clone().into();
         // TODO: use macro to fetch each field from struct
         let (sql, args) = SqlBuilder::create(WORKSPACE_TABLE)
-            .add_arg("id", self.table.id)
-            .add_arg("name", self.table.name)
-            .add_arg("description", self.table.description)
-            .add_arg("modified_time", self.table.modified_time)
-            .add_arg("create_time", self.table.create_time)
-            .add_arg("user_id", self.table.user_id)
+            .add_field_with_arg("id", self.table.id)
+            .add_field_with_arg("name", self.table.name)
+            .add_field_with_arg("description", self.table.description)
+            .add_field_with_arg("modified_time", self.table.modified_time)
+            .add_field_with_arg("create_time", self.table.create_time)
+            .add_field_with_arg("user_id", self.table.user_id)
             .build()?;

         Ok((sql, args, workspace))

View File

@@ -79,7 +79,7 @@ pub async fn update_doc(pool: &PgPool, mut params: UpdateDocParams) -> Result<()
     let (sql, args) = SqlBuilder::update(DOC_TABLE)
         .add_some_arg("data", data)
-        .add_arg("rev_id", params.rev_id)
+        .add_field_with_arg("rev_id", params.rev_id)
         .and_where_eq("id", doc_id)
         .build()?;
@@ -128,9 +128,9 @@ impl NewDocSqlBuilder {
     pub fn build(self) -> Result<(String, PgArguments), ServerError> {
         let (sql, args) = SqlBuilder::create(DOC_TABLE)
-            .add_arg("id", self.table.id)
-            .add_arg("data", self.table.data)
-            .add_arg("rev_id", self.table.rev_id)
+            .add_field_with_arg("id", self.table.id)
+            .add_field_with_arg("data", self.table.data)
+            .add_field_with_arg("rev_id", self.table.rev_id)
             .build()?;

         Ok((sql, args))

View File

@@ -0,0 +1,165 @@
use crate::{
    services::kv_store::{KVStore, KeyValue},
    util::sqlx_ext::{map_sqlx_error, SqlBuilder},
};
use actix_web::web::Data;
use anyhow::Context;
use backend_service::errors::ServerError;
use bytes::Bytes;
use lib_infra::future::FutureResultSend;
use sql_builder::{quote, SqlBuilder as RawSqlBuilder};
use sqlx::{postgres::PgArguments, Error, PgPool, Postgres, Row};

const KV_TABLE: &str = "kv_table";

pub(crate) struct PostgresKV {
    pub(crate) pg_pool: Data<PgPool>,
}

impl KVStore for PostgresKV {
    fn get(&self, key: &str) -> FutureResultSend<Option<Bytes>, ServerError> {
        let pg_pool = self.pg_pool.clone();
        let id = key.to_string();
        FutureResultSend::new(async move {
            let mut transaction = pg_pool
                .begin()
                .await
                .context("[KV]:Failed to acquire a Postgres connection")?;

            let (sql, args) = SqlBuilder::select(KV_TABLE)
                .add_field("*")
                .and_where_eq("id", &id)
                .build()?;

            let result = sqlx::query_as_with::<Postgres, KVTable, PgArguments>(&sql, args)
                .fetch_one(&mut transaction)
                .await;

            let result = match result {
                Ok(val) => Ok(Some(Bytes::from(val.blob))),
                Err(error) => match error {
                    Error::RowNotFound => Ok(None),
                    _ => Err(map_sqlx_error(error)),
                },
            };

            transaction
                .commit()
                .await
                .context("[KV]:Failed to commit SQL transaction.")?;

            result
        })
    }

    fn set(&self, key: &str, bytes: Bytes) -> FutureResultSend<(), ServerError> {
        self.batch_set(vec![KeyValue {
            key: key.to_string(),
            value: bytes,
        }])
    }

    fn delete(&self, key: &str) -> FutureResultSend<(), ServerError> {
        let pg_pool = self.pg_pool.clone();
        let id = key.to_string();
        FutureResultSend::new(async move {
            let mut transaction = pg_pool
                .begin()
                .await
                .context("[KV]:Failed to acquire a Postgres connection")?;

            let (sql, args) = SqlBuilder::delete(KV_TABLE).and_where_eq("id", &id).build()?;
            let _ = sqlx::query_with(&sql, args)
                .execute(&mut transaction)
                .await
                .map_err(map_sqlx_error)?;

            transaction
                .commit()
                .await
                .context("[KV]:Failed to commit SQL transaction.")?;

            Ok(())
        })
    }

    fn batch_set(&self, kvs: Vec<KeyValue>) -> FutureResultSend<(), ServerError> {
        let pg_pool = self.pg_pool.clone();
        FutureResultSend::new(async move {
            let mut transaction = pg_pool
                .begin()
                .await
                .context("[KV]:Failed to acquire a Postgres connection")?;

            let mut builder = RawSqlBuilder::insert_into(KV_TABLE);
            let mut m_builder = builder.field("id").field("blob");

            for kv in kvs {
                let s = match std::str::from_utf8(&kv.value) {
                    Ok(v) => v,
                    Err(e) => {
                        log::error!("[KV]: {}", e);
                        ""
                    },
                };
                m_builder = m_builder.values(&[quote(kv.key), quote(s)]);
            }

            let sql = m_builder.sql()?;
            let _ = sqlx::query(&sql)
                .execute(&mut transaction)
                .await
                .map_err(map_sqlx_error)?;

            transaction
                .commit()
                .await
                .context("[KV]:Failed to commit SQL transaction.")?;

            Ok::<(), ServerError>(())
        })
    }

    fn batch_get(&self, keys: Vec<String>) -> FutureResultSend<Vec<KeyValue>, ServerError> {
        let pg_pool = self.pg_pool.clone();
        FutureResultSend::new(async move {
            let mut transaction = pg_pool
                .begin()
                .await
                .context("[KV]:Failed to acquire a Postgres connection")?;

            let sql = RawSqlBuilder::select_from(KV_TABLE)
                .field("id")
                .field("blob")
                .and_where_in_quoted("id", &keys)
                .sql()?;

            let rows = sqlx::query(&sql)
                .fetch_all(&mut transaction)
                .await
                .map_err(map_sqlx_error)?;

            let kvs = rows
                .into_iter()
                .map(|row| {
                    let bytes: Vec<u8> = row.get("blob");
                    KeyValue {
                        key: row.get("id"),
                        value: Bytes::from(bytes),
                    }
                })
                .collect::<Vec<KeyValue>>();

            transaction
                .commit()
                .await
                .context("[KV]:Failed to commit SQL transaction.")?;

            Ok::<Vec<KeyValue>, ServerError>(kvs)
        })
    }
}

#[derive(Debug, Clone, sqlx::FromRow)]
struct KVTable {
    pub(crate) id: String,
    pub(crate) blob: Vec<u8>,
}

View File

@@ -0,0 +1,21 @@
mod kv;

use bytes::Bytes;
pub(crate) use kv::*;

use backend_service::errors::ServerError;
use lib_infra::future::FutureResultSend;

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct KeyValue {
    pub key: String,
    pub value: Bytes,
}

pub trait KVStore: Send + Sync {
    fn get(&self, key: &str) -> FutureResultSend<Option<Bytes>, ServerError>;
    fn set(&self, key: &str, value: Bytes) -> FutureResultSend<(), ServerError>;
    fn delete(&self, key: &str) -> FutureResultSend<(), ServerError>;
    fn batch_set(&self, kvs: Vec<KeyValue>) -> FutureResultSend<(), ServerError>;
    fn batch_get(&self, keys: Vec<String>) -> FutureResultSend<Vec<KeyValue>, ServerError>;
}

View File

@@ -1,4 +1,5 @@
 pub mod core;
 pub mod document;
+pub mod kv_store;
 pub mod user;
 pub mod web_socket;

View File

@@ -52,7 +52,7 @@ impl SqlBuilder {
         builder
     }

-    pub fn add_arg<'a, T>(mut self, field: &str, arg: T) -> Self
+    pub fn add_field_with_arg<'a, T>(mut self, field: &str, arg: T) -> Self
     where
         T: 'a + Send + Encode<'a, Postgres> + Type<Postgres>,
     {
@@ -67,7 +67,7 @@ impl SqlBuilder {
         T: 'a + Send + Encode<'a, Postgres> + Type<Postgres>,
     {
         if add {
-            self.add_arg(field, arg)
+            self.add_field_with_arg(field, arg)
         } else {
             self
         }
@@ -78,7 +78,7 @@ impl SqlBuilder {
         T: 'a + Send + Encode<'a, Postgres> + Type<Postgres>,
     {
         if let Some(arg) = arg {
-            self.add_arg(field, arg)
+            self.add_field_with_arg(field, arg)
         } else {
             self
         }

View File

@@ -1,3 +0,0 @@
-mod auth;
-mod doc;
-mod workspace;

View File

@@ -0,0 +1,51 @@
use crate::util::helper::spawn_server;
use backend::services::kv_store::KeyValue;
use std::str;

#[actix_rt::test]
async fn kv_set_test() {
    let server = spawn_server().await;
    let kv = server.app_ctx.kv_store.clone();
    let s1 = "123".to_string();
    let key = "1";
    let _ = kv.set(key, s1.clone().into()).await.unwrap();
    let bytes = kv.get(key).await.unwrap().unwrap();
    let s2 = str::from_utf8(&bytes).unwrap();
    assert_eq!(s1, s2);
}

#[actix_rt::test]
async fn kv_delete_test() {
    let server = spawn_server().await;
    let kv = server.app_ctx.kv_store.clone();
    let s1 = "123".to_string();
    let key = "1";
    let _ = kv.set(key, s1.clone().into()).await.unwrap();
    let _ = kv.delete(key).await.unwrap();
    assert_eq!(kv.get(key).await.unwrap(), None);
}

#[actix_rt::test]
async fn kv_batch_set_test() {
    let server = spawn_server().await;
    let kv = server.app_ctx.kv_store.clone();
    let kvs = vec![
        KeyValue {
            key: "1".to_string(),
            value: "a".to_string().into(),
        },
        KeyValue {
            key: "2".to_string(),
            value: "b".to_string().into(),
        },
    ];
    kv.batch_set(kvs.clone()).await.unwrap();

    let kvs_from_db = kv
        .batch_get(kvs.clone().into_iter().map(|value| value.key).collect::<Vec<String>>())
        .await
        .unwrap();

    assert_eq!(kvs, kvs_from_db);
}

View File

@@ -0,0 +1,3 @@
mod auth_test;
mod kv_test;
mod workspace_test;

View File

@@ -70,7 +70,7 @@ async fn delta_sync_while_editing_with_attribute() {
 // │ops: ["123", "456"] rev: 2│ │ │
 // └──────────────────────────┘ │ │
 // │ │
-// ◀── http request ─┤ Open document
+// ◀── http request ─┤ Open document_test
 // │ │
 // │ │ ┌──────────────────────────┐
 // ├──http response──┼─▶│ops: ["123", "456"] rev: 2│
@@ -115,7 +115,7 @@ async fn delta_sync_with_server_push_delta() {
 // └─────────┘ └─────────┘
 // │ │
 // │ │
-// ◀── http request ─┤ Open document
+// ◀── http request ─┤ Open document_test
 // │ │
 // │ │ ┌───────────────┐
 // ├──http response──┼─▶│ops: [] rev: 0 │
@@ -165,7 +165,7 @@ async fn delta_sync_while_local_rev_less_than_server_rev() {
 // ┌───────────────────┐ │ │
 // │ops: ["123"] rev: 1│ │ │
 // └───────────────────┘ │ │
-// ◀── http request ─┤ Open document
+// ◀── http request ─┤ Open document_test
 // │ │
 // │ │ ┌───────────────┐
 // ├──http response──┼──▶│ops: [123] rev:│

View File

@@ -1,2 +1,3 @@
 // mod edit_script;
 // mod edit_test;
+mod crud_test;

View File

@@ -1,3 +1,3 @@
-mod api;
-mod document;
+mod api_test;
+mod document_test;
 pub mod util;

View File

@@ -150,7 +150,7 @@ impl TestUserServer {
     }

     pub async fn read_doc(&self, params: DocIdentifier) -> Option<Doc> {
-        let url = format!("{}/api/document", self.http_addr());
+        let url = format!("{}/api/document_test", self.http_addr());
         let doc = read_doc_request(self.user_token(), params, &url).await.unwrap();
         doc
     }

View File

@@ -45,8 +45,8 @@ fn parse_files_protobuf(proto_crate_path: &str, proto_output_dir: &str) -> Vec<P
         }

         // https://docs.rs/syn/1.0.54/syn/struct.File.html
-        let ast =
-            syn::parse_file(read_file(&path).unwrap().as_ref()).expect(&format!("Unable to parse file at {}", path));
+        let ast = syn::parse_file(read_file(&path).unwrap().as_ref())
+            .unwrap_or_else(|_| panic!("Unable to parse file at {}", path));
         let structs = get_ast_structs(&ast);
         let proto_file_path = format!("{}/{}.proto", &proto_output_dir, &file_name);
         let mut proto_file_content = parse_or_init_proto_file(proto_file_path.as_ref());
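This last hunk is a small lint fix: `.expect(&format!(…))` builds the panic message even on the success path, which clippy flags as `expect_fun_call`, while `.unwrap_or_else(|_| panic!(…))` defers the formatting to the error branch. An illustrative side-by-side, with a made-up wrapper function:

```rust
// Illustration only: the same parse call written both ways.
fn parse(input: &str) -> syn::File {
    // Before: the format! String is allocated on every call, even when parsing succeeds.
    // syn::parse_file(input).expect(&format!("Unable to parse {}", input))

    // After: the message is only formatted if parse_file actually returns Err.
    syn::parse_file(input).unwrap_or_else(|_| panic!("Unable to parse {}", input))
}
```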