feat: initial file upload api (#4299)

* feat: initial file upload api

* feat: initial file upload api

* fix: add pb index

* feat: remove file name

* feat: read everything to mem

* feat: revamp object storage

* chore: cargo format

* chore: update deps

* feat: revised implementations and style

* chore: use deploy env instead

* chore: use deploy env instead

* chore: use deploy env instead

* refactor: move logic from handler to manager

* fix: format issues

* fix: cargo clippy

* chore: cargo check tauri

* fix: debug docker integration test

* fix: debug docker integration test

* fix: debug docker integration test gotrue

* fix: debug docker integration test docker compose version

* fix: docker scripts

* fix: cargo fmt

* fix: add sleep after docker compose up

---------

Co-authored-by: nathan <nathan@appflowy.io>
Zack 2024-01-17 02:59:15 +08:00 committed by GitHub
parent 15cb1b5f19
commit 38c3e700e9
30 changed files with 757 additions and 403 deletions

View File

@ -248,8 +248,9 @@ jobs:
working-directory: AppFlowy-Cloud
run: |
docker compose down -v --remove-orphans
docker pull appflowyinc/appflowy_cloud:latest
docker compose pull
docker compose up -d
sleep 10
- name: Checkout source code
uses: actions/checkout@v2
@ -298,6 +299,7 @@ jobs:
export DISPLAY=:99
sudo Xvfb -ac :99 -screen 0 1280x1024x24 > /dev/null 2>&1 &
sudo apt-get install network-manager
docker ps -a
flutter test integration_test/cloud/cloud_runner.dart -d Linux --coverage
shell: bash

View File

@ -104,4 +104,4 @@ jobs:
- name: clippy rust-lib
run: cargo clippy --all-targets -- -D warnings
working-directory: frontend/rust-lib
working-directory: frontend/rust-lib

View File

@ -614,7 +614,7 @@ dependencies = [
"num-traits",
"serde",
"wasm-bindgen",
"windows-targets",
"windows-targets 0.48.0",
]
[[package]]
@ -1187,12 +1187,12 @@ dependencies = [
[[package]]
name = "ctor"
version = "0.1.26"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
checksum = "30d2b3721e861707777e3195b0158f950ae6dc4a27e4d02ff9f67e3eb3de199e"
dependencies = [
"quote",
"syn 1.0.109",
"syn 2.0.47",
]
[[package]]
@ -1784,6 +1784,7 @@ dependencies = [
"flowy-notification",
"flowy-storage",
"futures",
"fxhash",
"getrandom 0.2.10",
"indexmap 2.1.0",
"lib-dispatch",
@ -1991,10 +1992,13 @@ dependencies = [
"bytes",
"flowy-error",
"lib-infra",
"mime",
"mime_guess",
"reqwest",
"serde",
"serde_json",
"tokio",
"tracing",
"url",
]
@ -2921,9 +2925,9 @@ dependencies = [
[[package]]
name = "infer"
version = "0.12.0"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a898e4b7951673fce96614ce5751d13c40fc5674bc2d759288e46c3ab62598b3"
checksum = "f551f8c3a39f68f986517db0d1759de85881894fdc7db798bd2a9df9cb04b7fc"
dependencies = [
"cfb",
]
@ -3047,9 +3051,9 @@ dependencies = [
[[package]]
name = "json-patch"
version = "1.0.0"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f54898088ccb91df1b492cc80029a6fdf1c48ca0db7c6822a8babad69c94658"
checksum = "55ff1e1486799e3f64129f8ccad108b38290df9cd7015cd31bed17239f0789d6"
dependencies = [
"serde",
"serde_json",
@ -3784,7 +3788,7 @@ dependencies = [
"libc",
"redox_syscall 0.3.5",
"smallvec",
"windows-targets",
"windows-targets 0.48.0",
]
[[package]]
@ -3927,9 +3931,7 @@ version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259"
dependencies = [
"phf_macros 0.10.0",
"phf_shared 0.10.0",
"proc-macro-hack",
]
[[package]]
@ -4016,20 +4018,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "phf_macros"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58fdf3184dd560f160dd73922bea2d5cd6e8f064bf4b13110abd81b03697b4e0"
dependencies = [
"phf_generator 0.10.0",
"phf_shared 0.10.0",
"proc-macro-hack",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "phf_macros"
version = "0.11.2"
@ -5813,9 +5801,9 @@ dependencies = [
[[package]]
name = "tauri-utils"
version = "1.5.0"
version = "1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34d55e185904a84a419308d523c2c6891d5e2dbcee740c4997eb42e75a7b0f46"
checksum = "ece74810b1d3d44f29f732a7ae09a63183d63949bbdd59c61f8ed2a1b70150db"
dependencies = [
"brotli",
"ctor",
@ -5828,7 +5816,7 @@ dependencies = [
"kuchikiki",
"log",
"memchr",
"phf 0.10.1",
"phf 0.11.2",
"proc-macro2",
"quote",
"semver",
@ -5838,7 +5826,7 @@ dependencies = [
"thiserror",
"url",
"walkdir",
"windows 0.39.0",
"windows-version",
]
[[package]]
@ -6844,7 +6832,7 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f"
dependencies = [
"windows-targets",
"windows-targets 0.48.0",
]
[[package]]
@ -6894,7 +6882,7 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets",
"windows-targets 0.48.0",
]
[[package]]
@ -6912,12 +6900,36 @@ dependencies = [
"windows_x86_64_msvc 0.48.0",
]
[[package]]
name = "windows-targets"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
dependencies = [
"windows_aarch64_gnullvm 0.52.0",
"windows_aarch64_msvc 0.52.0",
"windows_i686_gnu 0.52.0",
"windows_i686_msvc 0.52.0",
"windows_x86_64_gnu 0.52.0",
"windows_x86_64_gnullvm 0.52.0",
"windows_x86_64_msvc 0.52.0",
]
[[package]]
name = "windows-tokens"
version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f838de2fe15fe6bac988e74b798f26499a8b21a9d97edec321e79b28d1d7f597"
[[package]]
name = "windows-version"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75aa004c988e080ad34aff5739c39d0312f4684699d6d71fc8a198d057b8b9b4"
dependencies = [
"windows-targets 0.52.0",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.42.2"
@ -6930,6 +6942,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
[[package]]
name = "windows_aarch64_msvc"
version = "0.39.0"
@ -6948,6 +6966,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
[[package]]
name = "windows_i686_gnu"
version = "0.39.0"
@ -6966,6 +6990,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
[[package]]
name = "windows_i686_gnu"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
[[package]]
name = "windows_i686_msvc"
version = "0.39.0"
@ -6984,6 +7014,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
[[package]]
name = "windows_i686_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
[[package]]
name = "windows_x86_64_gnu"
version = "0.39.0"
@ -7002,6 +7038,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.2"
@ -7014,6 +7056,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
[[package]]
name = "windows_x86_64_msvc"
version = "0.39.0"
@ -7032,6 +7080,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
[[package]]
name = "winnow"
version = "0.4.7"

View File

@ -14,7 +14,7 @@ rust-version = "1.57"
tauri-build = { version = "1.5", features = [] }
[workspace.dependencies]
anyhow = "1.0.75"
anyhow = "1.0"
tracing = "0.1.40"
bytes = "1.5.0"
serde = "1.0.108"
@ -35,7 +35,7 @@ lru = "0.12.0"
serde_json.workspace = true
serde.workspace = true
tauri = { version = "1.5", features = ["clipboard-all", "fs-all", "shell-open"] }
tauri-utils = "1.5"
tauri-utils = "1.5.2"
bytes.workspace = true
tracing.workspace = true
lib-dispatch = { path = "../../rust-lib/lib-dispatch", features = ["use_serde"] }
@ -74,7 +74,3 @@ collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev
collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "3c2cb055e47ec9d6bff3d3aeb2a476b85d02cb80" }
collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "3c2cb055e47ec9d6bff3d3aeb2a476b85d02cb80" }
collab-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "3c2cb055e47ec9d6bff3d3aeb2a476b85d02cb80" }

View File

@ -1777,6 +1777,7 @@ dependencies = [
"flowy-notification",
"flowy-storage",
"futures",
"fxhash",
"getrandom 0.2.10",
"indexmap 2.1.0",
"lib-dispatch",
@ -1993,10 +1994,13 @@ dependencies = [
"bytes",
"flowy-error",
"lib-infra",
"mime",
"mime_guess",
"reqwest",
"serde",
"serde_json",
"tokio",
"tracing",
"url",
]

View File

@ -59,7 +59,7 @@ flowy-storage = { workspace = true, path = "flowy-storage" }
collab-integrate = { workspace = true, path = "collab-integrate" }
flowy-ai = { workspace = true, path = "flowy-ai" }
flowy-date = { workspace = true, path = "flowy-date" }
anyhow = "1.0.75"
anyhow = "1.0"
tracing = "0.1.40"
bytes = "1.5.0"
serde_json = "1.0.108"

View File

@ -1,118 +1,118 @@
use std::fs::File;
use std::io::{Cursor, Read};
use std::path::Path;
use uuid::Uuid;
use zip::ZipArchive;
use flowy_storage::StorageObject;
use crate::document::supabase_test::helper::FlowySupabaseDocumentTest;
#[tokio::test]
async fn supabase_document_upload_text_file_test() {
if let Some(test) = FlowySupabaseDocumentTest::new().await {
let workspace_id = test.get_current_workspace().await.id;
let storage_service = test
.document_manager
.get_file_storage_service()
.upgrade()
.unwrap();
let object = StorageObject::from_bytes(
&workspace_id,
&Uuid::new_v4().to_string(),
"hello world".as_bytes(),
"text/plain".to_string(),
);
let url = storage_service.create_object(object).await.unwrap();
let bytes = storage_service
.get_object_by_url(url.clone())
.await
.unwrap();
let s = String::from_utf8(bytes.to_vec()).unwrap();
assert_eq!(s, "hello world");
// Delete the text file
let _ = storage_service.delete_object_by_url(url).await;
}
}
#[tokio::test]
async fn supabase_document_upload_zip_file_test() {
if let Some(test) = FlowySupabaseDocumentTest::new().await {
let workspace_id = test.get_current_workspace().await.id;
let storage_service = test
.document_manager
.get_file_storage_service()
.upgrade()
.unwrap();
// Upload zip file
let object = StorageObject::from_file(
&workspace_id,
&Uuid::new_v4().to_string(),
"./tests/asset/test.txt.zip",
);
let url = storage_service.create_object(object).await.unwrap();
// Read zip file
let zip_data = storage_service
.get_object_by_url(url.clone())
.await
.unwrap();
let reader = Cursor::new(zip_data);
let mut archive = ZipArchive::new(reader).unwrap();
for i in 0..archive.len() {
let mut file = archive.by_index(i).unwrap();
let name = file.name().to_string();
let mut out = Vec::new();
file.read_to_end(&mut out).unwrap();
if name.starts_with("__MACOSX/") {
continue;
}
assert_eq!(name, "test.txt");
assert_eq!(String::from_utf8(out).unwrap(), "hello world");
}
// Delete the zip file
let _ = storage_service.delete_object_by_url(url).await;
}
}
#[tokio::test]
async fn supabase_document_upload_image_test() {
if let Some(test) = FlowySupabaseDocumentTest::new().await {
let workspace_id = test.get_current_workspace().await.id;
let storage_service = test
.document_manager
.get_file_storage_service()
.upgrade()
.unwrap();
// Upload zip file
let object = StorageObject::from_file(
&workspace_id,
&Uuid::new_v4().to_string(),
"./tests/asset/logo.png",
);
let url = storage_service.create_object(object).await.unwrap();
let image_data = storage_service
.get_object_by_url(url.clone())
.await
.unwrap();
// Read the image file
let mut file = File::open(Path::new("./tests/asset/logo.png")).unwrap();
let mut local_data = Vec::new();
file.read_to_end(&mut local_data).unwrap();
assert_eq!(image_data, local_data);
// Delete the image
let _ = storage_service.delete_object_by_url(url).await;
}
}
// use std::fs::File;
// use std::io::{Cursor, Read};
// use std::path::Path;
//
// use uuid::Uuid;
// use zip::ZipArchive;
//
// use flowy_storage::StorageObject;
//
// use crate::document::supabase_test::helper::FlowySupabaseDocumentTest;
//
// #[tokio::test]
// async fn supabase_document_upload_text_file_test() {
// if let Some(test) = FlowySupabaseDocumentTest::new().await {
// let workspace_id = test.get_current_workspace().await.id;
// let storage_service = test
// .document_manager
// .get_file_storage_service()
// .upgrade()
// .unwrap();
//
// let object = StorageObject::from_bytes(
// &workspace_id,
// &Uuid::new_v4().to_string(),
// "hello world".as_bytes(),
// "text/plain".to_string(),
// );
//
// let url = storage_service.create_object(object).await.unwrap();
//
// let bytes = storage_service
// .get_object(url.clone())
// .await
// .unwrap();
// let s = String::from_utf8(bytes.to_vec()).unwrap();
// assert_eq!(s, "hello world");
//
// // Delete the text file
// let _ = storage_service.delete_object(url).await;
// }
// }
//
// #[tokio::test]
// async fn supabase_document_upload_zip_file_test() {
// if let Some(test) = FlowySupabaseDocumentTest::new().await {
// let workspace_id = test.get_current_workspace().await.id;
// let storage_service = test
// .document_manager
// .get_file_storage_service()
// .upgrade()
// .unwrap();
//
// // Upload zip file
// let object = StorageObject::from_file(
// &workspace_id,
// &Uuid::new_v4().to_string(),
// "./tests/asset/test.txt.zip",
// );
// let url = storage_service.create_object(object).await.unwrap();
//
// // Read zip file
// let zip_data = storage_service
// .get_object(url.clone())
// .await
// .unwrap();
// let reader = Cursor::new(zip_data);
// let mut archive = ZipArchive::new(reader).unwrap();
// for i in 0..archive.len() {
// let mut file = archive.by_index(i).unwrap();
// let name = file.name().to_string();
// let mut out = Vec::new();
// file.read_to_end(&mut out).unwrap();
//
// if name.starts_with("__MACOSX/") {
// continue;
// }
// assert_eq!(name, "test.txt");
// assert_eq!(String::from_utf8(out).unwrap(), "hello world");
// }
//
// // Delete the zip file
// let _ = storage_service.delete_object(url).await;
// }
// }
// #[tokio::test]
// async fn supabase_document_upload_image_test() {
// if let Some(test) = FlowySupabaseDocumentTest::new().await {
// let workspace_id = test.get_current_workspace().await.id;
// let storage_service = test
// .document_manager
// .get_file_storage_service()
// .upgrade()
// .unwrap();
//
// // Upload zip file
// let object = StorageObject::from_file(
// &workspace_id,
// &Uuid::new_v4().to_string(),
// "./tests/asset/logo.png",
// );
// let url = storage_service.create_object(object).await.unwrap();
//
// let image_data = storage_service
// .get_object(url.clone())
// .await
// .unwrap();
//
// // Read the image file
// let mut file = File::open(Path::new("./tests/asset/logo.png")).unwrap();
// let mut local_data = Vec::new();
// file.read_to_end(&mut local_data).unwrap();
//
// assert_eq!(image_data, local_data);
//
// // Delete the image
// let _ = storage_service.delete_object(url).await;
// }
// }

View File

@ -8,7 +8,7 @@ use flowy_document::entities::{DocumentSnapshotData, DocumentSnapshotMeta};
use flowy_document::manager::{DocumentManager, DocumentSnapshotService, DocumentUserService};
use flowy_document_pub::cloud::DocumentCloudService;
use flowy_error::{FlowyError, FlowyResult};
use flowy_storage::FileStorageService;
use flowy_storage::ObjectStorageService;
use flowy_user::services::authenticate_user::AuthenticateUser;
pub struct DocumentDepsResolver();
@ -18,7 +18,7 @@ impl DocumentDepsResolver {
_database_manager: &Arc<DatabaseManager>,
collab_builder: Arc<AppFlowyCollabBuilder>,
cloud_service: Arc<dyn DocumentCloudService>,
storage_service: Weak<dyn FileStorageService>,
storage_service: Weak<dyn ObjectStorageService>,
) -> Arc<DocumentManager> {
let user_service: Arc<dyn DocumentUserService> =
Arc::new(DocumentUserImpl(authenticate_user.clone()));

View File

@ -1,7 +1,7 @@
use flowy_storage::{ObjectIdentity, ObjectStorageService};
use std::sync::Arc;
use anyhow::Error;
use bytes::Bytes;
use client_api::collab_sync::{SinkConfig, SinkStrategy, SyncObject, SyncPlugin};
use collab::core::collab::CollabDocState;
use collab::core::origin::{CollabClient, CollabOrigin};
@ -23,35 +23,43 @@ use flowy_folder_pub::cloud::{
};
use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_storage::{FileStorageService, StorageObject};
use flowy_storage::ObjectValue;
use flowy_user_pub::cloud::{UserCloudService, UserCloudServiceProvider};
use flowy_user_pub::entities::{Authenticator, UserTokenState};
use lib_infra::future::{to_fut, Fut, FutureResult};
use crate::integrate::server::{Server, ServerProvider};
impl FileStorageService for ServerProvider {
fn create_object(&self, object: StorageObject) -> FutureResult<String, FlowyError> {
impl ObjectStorageService for ServerProvider {
fn get_object_url(&self, object_id: ObjectIdentity) -> FutureResult<String, FlowyError> {
let server = self.get_server();
FutureResult::new(async move {
let storage = server?.file_storage().ok_or(FlowyError::internal())?;
storage.create_object(object).await
storage.get_object_url(object_id).await
})
}
fn delete_object_by_url(&self, object_url: String) -> FutureResult<(), FlowyError> {
fn put_object(&self, url: String, val: ObjectValue) -> FutureResult<(), FlowyError> {
let server = self.get_server();
FutureResult::new(async move {
let storage = server?.file_storage().ok_or(FlowyError::internal())?;
storage.delete_object_by_url(object_url).await
storage.put_object(url, val).await
})
}
fn get_object_by_url(&self, object_url: String) -> FutureResult<Bytes, FlowyError> {
fn delete_object(&self, url: String) -> FutureResult<(), FlowyError> {
let server = self.get_server();
FutureResult::new(async move {
let storage = server?.file_storage().ok_or(FlowyError::internal())?;
storage.get_object_by_url(object_url).await
storage.delete_object(url).await
})
}
fn get_object(&self, url: String) -> FutureResult<flowy_storage::ObjectValue, FlowyError> {
let server = self.get_server();
FutureResult::new(async move {
let storage = server?.file_storage().ok_or(FlowyError::internal())?;
storage.get_object(url).await
})
}
}

View File

@ -1,5 +1,6 @@
#![allow(unused_doc_comments)]
use flowy_storage::ObjectStorageService;
use std::sync::Arc;
use std::time::Duration;
@ -12,7 +13,6 @@ use flowy_database2::DatabaseManager;
use flowy_document::manager::DocumentManager;
use flowy_folder::manager::FolderManager;
use flowy_sqlite::kv::StorePreferences;
use flowy_storage::FileStorageService;
use flowy_user::services::authenticate_user::AuthenticateUser;
use flowy_user::services::entities::UserConfig;
use flowy_user::user_manager::UserManager;
@ -146,7 +146,7 @@ impl AppFlowyCore {
&database_manager,
collab_builder.clone(),
server_provider.clone(),
Arc::downgrade(&(server_provider.clone() as Arc<dyn FileStorageService>)),
Arc::downgrade(&(server_provider.clone() as Arc<dyn ObjectStorageService>)),
);
let folder_manager = FolderDepsResolver::resolve(

View File

@ -36,6 +36,7 @@ futures.workspace = true
tokio-stream = { workspace = true, features = ["sync"] }
scraper = "0.18.0"
lru.workspace = true
fxhash = "0.2.1"
[target.'cfg(target_arch = "wasm32")'.dependencies]
getrandom = { version = "0.2", features = ["js"]}

View File

@ -5,6 +5,8 @@ use collab_document::blocks::{json_str_to_hashmap, Block, BlockAction, DocumentD
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
use lib_infra::validator_fn::{required_not_empty_str, required_valid_path};
use validator::Validate;
use crate::parse::{NotEmptyStr, NotEmptyVec};
@ -62,6 +64,28 @@ pub struct DocumentRedoUndoResponsePB {
pub is_success: bool,
}
#[derive(Default, ProtoBuf, Validate)]
pub struct UploadFileParamsPB {
#[pb(index = 1)]
#[validate(custom = "required_not_empty_str")]
pub workspace_id: String,
#[pb(index = 2)]
#[validate(custom = "required_valid_path")]
pub local_file_path: String,
}
#[derive(Default, ProtoBuf, Validate)]
pub struct UploadedFilePB {
#[pb(index = 1)]
#[validate(url)]
pub url: String,
#[pb(index = 2)]
#[validate(custom = "required_valid_path")]
pub local_file_path: String,
}
#[derive(Default, ProtoBuf)]
pub struct CreateDocumentPayloadPB {
#[pb(index = 1)]

View File

@ -9,10 +9,10 @@ use std::sync::{Arc, Weak};
use collab_document::blocks::{
BlockAction, BlockActionPayload, BlockActionType, BlockEvent, BlockEventPayload, DeltaType,
};
use tracing::instrument;
use flowy_error::{FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
use tracing::instrument;
use crate::entities::*;
use crate::parser::document_data_parser::DocumentDataParser;
@ -401,3 +401,50 @@ pub(crate) async fn convert_data_to_json_handler(
data_result_ok(ConvertDataToJsonResponsePB { json: result })
}
// Handler for uploading a file
// `workspace_id` and `file_name` determine the file identity
pub(crate) async fn upload_file_handler(
params: AFPluginData<UploadFileParamsPB>,
manager: AFPluginState<Weak<DocumentManager>>,
) -> DataResult<UploadedFilePB, FlowyError> {
let AFPluginData(UploadFileParamsPB {
workspace_id,
local_file_path,
}) = params;
let manager = upgrade_document(manager)?;
let url = manager.upload_file(workspace_id, &local_file_path).await?;
Ok(AFPluginData(UploadedFilePB {
url,
local_file_path,
}))
}
#[instrument(level = "debug", skip_all, err)]
pub(crate) async fn download_file_handler(
params: AFPluginData<UploadedFilePB>,
manager: AFPluginState<Weak<DocumentManager>>,
) -> FlowyResult<()> {
let AFPluginData(UploadedFilePB {
url,
local_file_path,
}) = params;
let manager = upgrade_document(manager)?;
manager.download_file(local_file_path, url).await
}
// Handler for deleting a file
pub(crate) async fn delete_file_handler(
params: AFPluginData<UploadedFilePB>,
manager: AFPluginState<Weak<DocumentManager>>,
) -> FlowyResult<()> {
let AFPluginData(UploadedFilePB {
url,
local_file_path,
}) = params;
let manager = upgrade_document(manager)?;
manager.delete_file(local_file_path, url).await
}
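
The three handlers above follow the same pattern: deserialize the validated payload, upgrade the `Weak<DocumentManager>`, and delegate to the manager. Below is a minimal sketch of that upgrade-or-fail pattern; the error type and the `upgrade_document` body are illustrative stand-ins, not the crate's exact definitions.

```rust
use std::sync::{Arc, Weak};

struct DocumentManager;

#[derive(Debug)]
struct Error(&'static str);

// Handlers hold a Weak reference so the plugin does not keep the manager
// alive; each call either upgrades it or fails fast.
fn upgrade_document(manager: Weak<DocumentManager>) -> Result<Arc<DocumentManager>, Error> {
  manager
    .upgrade()
    .ok_or(Error("DocumentManager has already been dropped"))
}

fn main() {
  let manager = Arc::new(DocumentManager);
  let weak = Arc::downgrade(&manager);
  assert!(upgrade_document(weak.clone()).is_ok());

  drop(manager);
  assert!(upgrade_document(weak).is_err());
}
```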

View File

@ -39,6 +39,9 @@ pub fn init(document_manager: Weak<DocumentManager>) -> AFPlugin {
DocumentEvent::ConvertDataToJSON,
convert_data_to_json_handler,
)
.event(DocumentEvent::UploadFile, upload_file_handler)
.event(DocumentEvent::DownloadFile, download_file_handler)
.event(DocumentEvent::DeleteFile, delete_file_handler)
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Display, ProtoBuf_Enum, Flowy_Event)]
@ -108,4 +111,11 @@ pub enum DocumentEvent {
#[event(input = "DocumentSnapshotMetaPB", output = "DocumentSnapshotPB")]
GetDocumentSnapshot = 14,
#[event(input = "UploadFileParamsPB", output = "UploadedFilePB")]
UploadFile = 15,
#[event(input = "UploadedFilePB")]
DownloadFile = 16,
#[event(input = "UploadedFilePB")]
DeleteFile = 17,
}

View File

@ -10,15 +10,21 @@ use collab_document::blocks::DocumentData;
use collab_document::document::Document;
use collab_document::document_data::default_document_data;
use collab_entity::CollabType;
use flowy_storage::ObjectIdentity;
use flowy_storage::ObjectValue;
use lru::LruCache;
use parking_lot::Mutex;
use tokio::io::AsyncWriteExt;
use tracing::error;
use tracing::info;
use tracing::warn;
use tracing::{event, instrument};
use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig};
use collab_integrate::{CollabKVAction, CollabKVDB, CollabPersistenceConfig};
use flowy_document_pub::cloud::DocumentCloudService;
use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult};
use flowy_storage::FileStorageService;
use flowy_storage::ObjectStorageService;
use crate::document::MutexDocument;
use crate::entities::{
@ -45,7 +51,7 @@ pub struct DocumentManager {
collab_builder: Arc<AppFlowyCollabBuilder>,
documents: Arc<Mutex<LruCache<String, Arc<MutexDocument>>>>,
cloud_service: Arc<dyn DocumentCloudService>,
storage_service: Weak<dyn FileStorageService>,
storage_service: Weak<dyn ObjectStorageService>,
snapshot_service: Arc<dyn DocumentSnapshotService>,
}
@ -54,7 +60,7 @@ impl DocumentManager {
user_service: Arc<dyn DocumentUserService>,
collab_builder: Arc<AppFlowyCollabBuilder>,
cloud_service: Arc<dyn DocumentCloudService>,
storage_service: Weak<dyn FileStorageService>,
storage_service: Weak<dyn ObjectStorageService>,
snapshot_service: Arc<dyn DocumentSnapshotService>,
) -> Self {
let documents = Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(10).unwrap())));
@ -246,6 +252,73 @@ impl DocumentManager {
Ok(snapshot)
}
pub async fn upload_file(
&self,
workspace_id: String,
local_file_path: &str,
) -> FlowyResult<String> {
let object_value = ObjectValue::from_file(local_file_path).await?;
let storage_service = self.storage_service_upgrade()?;
let url = {
let hash = fxhash::hash(object_value.raw.as_ref());
storage_service
.get_object_url(ObjectIdentity {
workspace_id: workspace_id.to_owned(),
file_id: hash.to_string(),
})
.await?
};
// let the upload happen in the background
let clone_url = url.clone();
tokio::spawn(async move {
if let Err(e) = storage_service.put_object(clone_url, object_value).await {
error!("upload file failed: {}", e);
}
});
Ok(url)
}
pub async fn download_file(&self, local_file_path: String, url: String) -> FlowyResult<()> {
if tokio::fs::metadata(&local_file_path).await.is_ok() {
warn!("file already exist in user local disk: {}", local_file_path);
return Ok(());
}
let storage_service = self.storage_service_upgrade()?;
let object_value = storage_service.get_object(url).await?;
// create file if not exist
let mut file = tokio::fs::OpenOptions::new()
.create(true)
.write(true)
.open(&local_file_path)
.await?;
let n = file.write(&object_value.raw).await?;
info!("downloaded {} bytes to file: {}", n, local_file_path);
Ok(())
}
pub async fn delete_file(&self, local_file_path: String, url: String) -> FlowyResult<()> {
// delete file from local
tokio::fs::remove_file(local_file_path).await?;
// delete from cloud
let storage_service = self.storage_service_upgrade()?;
tokio::spawn(async move {
if let Err(e) = storage_service.delete_object(url).await {
// TODO: add WAL to log the delete operation.
// keep a list of files to be deleted, and retry later
error!("delete file failed: {}", e);
}
});
Ok(())
}
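
A minimal, self-contained sketch of the content-addressed flow in `upload_file` above: hash the bytes with fxhash to derive a stable file id, resolve a URL for it, and run the actual upload in a background task while the URL is returned immediately. `MockStorage` and the URL format are illustrative stand-ins, not the crate's real `ObjectStorageService`.

```rust
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

#[derive(Clone, Default)]
struct MockStorage {
  blobs: Arc<Mutex<HashMap<String, Vec<u8>>>>,
}

impl MockStorage {
  fn object_url(&self, workspace_id: &str, file_id: &str) -> String {
    format!("https://storage.example.com/{workspace_id}/{file_id}")
  }
  fn put(&self, url: String, bytes: Vec<u8>) {
    self.blobs.lock().unwrap().insert(url, bytes);
  }
}

#[tokio::main]
async fn main() {
  let storage = MockStorage::default();
  let raw = b"hello world".to_vec();

  // Stable id from the content hash, mirroring fxhash::hash in upload_file.
  let file_id = fxhash::hash(&raw).to_string();
  let url = storage.object_url("workspace-1", &file_id);

  // Upload in the background; the caller can embed `url` right away.
  let (bg, bg_url, bg_raw) = (storage.clone(), url.clone(), raw);
  let handle = tokio::spawn(async move { bg.put(bg_url, bg_raw) });

  println!("url available immediately: {url}");
  handle.await.unwrap(); // awaited here only so the sketch exits cleanly
}
```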
async fn collab_for_document(
&self,
uid: i64,
@ -279,6 +352,13 @@ impl DocumentManager {
}
}
fn storage_service_upgrade(&self) -> FlowyResult<Arc<dyn ObjectStorageService>> {
let storage_service = self.storage_service.upgrade().ok_or_else(|| {
FlowyError::internal().with_context("The file storage service is already dropped")
})?;
Ok(storage_service)
}
/// Only expose this method for testing
#[cfg(debug_assertions)]
pub fn get_cloud_service(&self) -> &Arc<dyn DocumentCloudService> {
@ -286,7 +366,7 @@ impl DocumentManager {
}
/// Only expose this method for testing
#[cfg(debug_assertions)]
pub fn get_file_storage_service(&self) -> &Weak<dyn FileStorageService> {
pub fn get_file_storage_service(&self) -> &Weak<dyn ObjectStorageService> {
&self.storage_service
}
}

View File

@ -2,7 +2,6 @@ use std::ops::Deref;
use std::sync::Arc;
use anyhow::Error;
use bytes::Bytes;
use collab::core::collab::CollabDocState;
use collab::preclude::CollabPlugin;
use collab_document::blocks::DocumentData;
@ -23,7 +22,7 @@ use flowy_document::entities::{DocumentSnapshotData, DocumentSnapshotMeta};
use flowy_document::manager::{DocumentManager, DocumentSnapshotService, DocumentUserService};
use flowy_document_pub::cloud::*;
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use flowy_storage::{FileStorageService, StorageObject};
use flowy_storage::ObjectStorageService;
use lib_infra::async_trait::async_trait;
use lib_infra::future::{to_fut, Fut, FutureResult};
@ -35,7 +34,7 @@ impl DocumentTest {
pub fn new() -> Self {
let user = FakeUser::new();
let cloud_service = Arc::new(LocalTestDocumentCloudServiceImpl());
let file_storage = Arc::new(DocumentTestFileStorageService) as Arc<dyn FileStorageService>;
let file_storage = Arc::new(DocumentTestFileStorageService) as Arc<dyn ObjectStorageService>;
let document_snapshot = Arc::new(DocumentTestSnapshot);
let manager = DocumentManager::new(
Arc::new(user),
@ -165,16 +164,27 @@ impl DocumentCloudService for LocalTestDocumentCloudServiceImpl {
}
pub struct DocumentTestFileStorageService;
impl FileStorageService for DocumentTestFileStorageService {
fn create_object(&self, _object: StorageObject) -> FutureResult<String, FlowyError> {
impl ObjectStorageService for DocumentTestFileStorageService {
fn get_object_url(
&self,
_object_id: flowy_storage::ObjectIdentity,
) -> FutureResult<String, FlowyError> {
todo!()
}
fn delete_object_by_url(&self, _object_url: String) -> FutureResult<(), FlowyError> {
fn put_object(
&self,
_url: String,
_object_value: flowy_storage::ObjectValue,
) -> FutureResult<(), FlowyError> {
todo!()
}
fn get_object_by_url(&self, _object_url: String) -> FutureResult<Bytes, FlowyError> {
fn delete_object(&self, _url: String) -> FutureResult<(), FlowyError> {
todo!()
}
fn get_object(&self, _url: String) -> FutureResult<flowy_storage::ObjectValue, FlowyError> {
todo!()
}
}

View File

@ -0,0 +1 @@

View File

@ -1,7 +1,5 @@
use bytes::Bytes;
use flowy_error::FlowyError;
use flowy_storage::{FileStorageService, StorageObject};
use flowy_storage::{ObjectIdentity, ObjectStorageService, ObjectValue};
use lib_infra::future::FutureResult;
use crate::af_cloud::AFServer;
@ -14,19 +12,47 @@ impl<T> AFCloudFileStorageServiceImpl<T> {
}
}
impl<T> FileStorageService for AFCloudFileStorageServiceImpl<T>
impl<T> ObjectStorageService for AFCloudFileStorageServiceImpl<T>
where
T: AFServer,
{
fn create_object(&self, _object: StorageObject) -> FutureResult<String, FlowyError> {
FutureResult::new(async move { Err(FlowyError::not_support()) })
fn get_object_url(&self, object_id: ObjectIdentity) -> FutureResult<String, FlowyError> {
let try_get_client = self.0.try_get_client();
FutureResult::new(async move {
let client = try_get_client?;
let url = client.get_blob_url(&object_id.workspace_id, &object_id.file_id);
Ok(url)
})
}
fn delete_object_by_url(&self, _object_url: String) -> FutureResult<(), FlowyError> {
FutureResult::new(async move { Err(FlowyError::not_support()) })
fn put_object(&self, url: String, file: ObjectValue) -> FutureResult<(), FlowyError> {
let try_get_client = self.0.try_get_client();
let file = file.clone();
FutureResult::new(async move {
let client = try_get_client?;
client.put_blob(&url, file.raw, &file.mime).await?;
Ok(())
})
}
fn get_object_by_url(&self, _object_url: String) -> FutureResult<Bytes, FlowyError> {
FutureResult::new(async move { Err(FlowyError::not_support()) })
fn delete_object(&self, url: String) -> FutureResult<(), FlowyError> {
let try_get_client = self.0.try_get_client();
FutureResult::new(async move {
let client = try_get_client?;
client.delete_blob(&url).await?;
Ok(())
})
}
fn get_object(&self, url: String) -> FutureResult<ObjectValue, FlowyError> {
let try_get_client = self.0.try_get_client();
FutureResult::new(async move {
let client = try_get_client?;
let (mime, raw) = client.get_blob(&url).await?;
Ok(ObjectValue {
raw: raw.into(),
mime,
})
})
}
}

View File

@ -9,6 +9,7 @@ use client_api::ws::{
ConnectState, WSClient, WSClientConfig, WSConnectStateReceiver, WebSocketChannel,
};
use client_api::{Client, ClientConfiguration};
use flowy_storage::ObjectStorageService;
use tokio::sync::watch;
use tokio_stream::wrappers::WatchStream;
use tracing::{error, event, info};
@ -18,7 +19,6 @@ use flowy_document_pub::cloud::DocumentCloudService;
use flowy_error::{ErrorCode, FlowyError};
use flowy_folder_pub::cloud::FolderCloudService;
use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_storage::FileStorageService;
use flowy_user_pub::cloud::{UserCloudService, UserUpdate};
use flowy_user_pub::entities::UserTokenState;
use lib_dispatch::prelude::af_spawn;
@ -213,7 +213,7 @@ impl AppFlowyServer for AppFlowyCloudServer {
}
}
fn file_storage(&self) -> Option<Arc<dyn FileStorageService>> {
fn file_storage(&self) -> Option<Arc<dyn ObjectStorageService>> {
let client = AFServerImpl {
client: self.get_client(),
};

View File

@ -1,3 +1,4 @@
use flowy_storage::ObjectStorageService;
use std::sync::Arc;
use parking_lot::RwLock;
@ -7,7 +8,6 @@ use flowy_database_pub::cloud::DatabaseCloudService;
use flowy_document_pub::cloud::DocumentCloudService;
use flowy_error::FlowyError;
use flowy_folder_pub::cloud::FolderCloudService;
use flowy_storage::FileStorageService;
// use flowy_user::services::database::{
// get_user_profile, get_user_workspace, open_collab_db, open_user_db,
// };
@ -67,7 +67,7 @@ impl AppFlowyServer for LocalServer {
Arc::new(LocalServerDocumentCloudServiceImpl())
}
fn file_storage(&self) -> Option<Arc<dyn FileStorageService>> {
fn file_storage(&self) -> Option<Arc<dyn ObjectStorageService>> {
None
}
}

View File

@ -1,3 +1,4 @@
use flowy_storage::ObjectStorageService;
use std::sync::Arc;
use anyhow::Error;
@ -11,7 +12,6 @@ use tokio_stream::wrappers::WatchStream;
use flowy_database_pub::cloud::DatabaseCloudService;
use flowy_document_pub::cloud::DocumentCloudService;
use flowy_folder_pub::cloud::FolderCloudService;
use flowy_storage::FileStorageService;
use flowy_user_pub::cloud::UserCloudService;
use flowy_user_pub::entities::UserTokenState;
use lib_infra::future::FutureResult;
@ -131,7 +131,7 @@ pub trait AppFlowyServer: Send + Sync + 'static {
FutureResult::new(async { Ok(None) })
}
fn file_storage(&self) -> Option<Arc<dyn FileStorageService>>;
fn file_storage(&self) -> Option<Arc<dyn ObjectStorageService>>;
}
pub struct EncryptionImpl {

View File

@ -1,6 +1,7 @@
use std::borrow::Cow;
use anyhow::Error;
use flowy_storage::StorageObject;
use hyper::header::CONTENT_TYPE;
use reqwest::header::IntoHeaderName;
use reqwest::multipart::{Form, Part};
@ -12,8 +13,6 @@ use tokio::fs::File;
use tokio::io::AsyncReadExt;
use url::Url;
use flowy_storage::StorageObject;
use crate::supabase::file_storage::{DeleteObjects, FileOptions, NewBucket, RequestBody};
pub struct StorageRequestBuilder {

View File

@ -1,7 +1,6 @@
use std::sync::{Arc, Weak};
use anyhow::{anyhow, Error};
use bytes::Bytes;
use reqwest::{
header::{HeaderMap, HeaderValue},
Client,
@ -11,10 +10,9 @@ use url::Url;
use flowy_encrypt::{decrypt_data, encrypt_data};
use flowy_error::FlowyError;
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_storage::{FileStoragePlan, FileStorageService, StorageObject};
use flowy_storage::{FileStoragePlan, ObjectStorageService};
use lib_infra::future::FutureResult;
use crate::response::ExtendedResponse;
use crate::supabase::file_storage::builder::StorageRequestBuilder;
use crate::AppFlowyEncryption;
@ -24,9 +22,87 @@ pub struct SupabaseFileStorage {
client: Client,
#[allow(dead_code)]
encryption: ObjectEncryption,
#[allow(dead_code)]
storage_plan: Arc<dyn FileStoragePlan>,
}
impl ObjectStorageService for SupabaseFileStorage {
fn get_object_url(
&self,
_object_id: flowy_storage::ObjectIdentity,
) -> FutureResult<String, FlowyError> {
todo!()
}
fn put_object(
&self,
_url: String,
_object_value: flowy_storage::ObjectValue,
) -> FutureResult<(), FlowyError> {
todo!()
}
fn delete_object(&self, _url: String) -> FutureResult<(), FlowyError> {
todo!()
}
fn get_object(&self, _url: String) -> FutureResult<flowy_storage::ObjectValue, FlowyError> {
todo!()
}
// fn create_object(&self, object: StorageObject) -> FutureResult<String, FlowyError> {
// let mut storage = self.storage();
// let storage_plan = Arc::downgrade(&self.storage_plan);
// FutureResult::new(async move {
// let plan = storage_plan
// .upgrade()
// .ok_or(anyhow!("Storage plan is not available"))?;
// plan.check_upload_object(&object).await?;
// storage = storage.upload_object("data", object);
// let url = storage.url.to_string();
// storage.build().await?.send().await?.success().await?;
// Ok(url)
// })
// }
// fn delete_object_by_url(&self, object_url: String) -> FutureResult<(), FlowyError> {
// let storage = self.storage();
// FutureResult::new(async move {
// let url = Url::parse(&object_url)?;
// let location = get_object_location_from(&url)?;
// storage
// .delete_object(location.bucket_id, location.file_name)
// .build()
// .await?
// .send()
// .await?
// .success()
// .await?;
// Ok(())
// })
// }
// fn get_object_by_url(&self, object_url: String) -> FutureResult<Bytes, FlowyError> {
// let storage = self.storage();
// FutureResult::new(async move {
// let url = Url::parse(&object_url)?;
// let location = get_object_location_from(&url)?;
// let bytes = storage
// .get_object(location.bucket_id, location.file_name)
// .build()
// .await?
// .send()
// .await?
// .get_bytes()
// .await?;
// Ok(bytes)
// })
// }
}
impl SupabaseFileStorage {
pub fn new(
config: &SupabaseConfiguration,
@ -61,60 +137,6 @@ impl SupabaseFileStorage {
}
}
impl FileStorageService for SupabaseFileStorage {
fn create_object(&self, object: StorageObject) -> FutureResult<String, FlowyError> {
let mut storage = self.storage();
let storage_plan = Arc::downgrade(&self.storage_plan);
FutureResult::new(async move {
let plan = storage_plan
.upgrade()
.ok_or(anyhow!("Storage plan is not available"))?;
plan.check_upload_object(&object).await?;
storage = storage.upload_object("data", object);
let url = storage.url.to_string();
storage.build().await?.send().await?.success().await?;
Ok(url)
})
}
fn delete_object_by_url(&self, object_url: String) -> FutureResult<(), FlowyError> {
let storage = self.storage();
FutureResult::new(async move {
let url = Url::parse(&object_url)?;
let location = get_object_location_from(&url)?;
storage
.delete_object(location.bucket_id, location.file_name)
.build()
.await?
.send()
.await?
.success()
.await?;
Ok(())
})
}
fn get_object_by_url(&self, object_url: String) -> FutureResult<Bytes, FlowyError> {
let storage = self.storage();
FutureResult::new(async move {
let url = Url::parse(&object_url)?;
let location = get_object_location_from(&url)?;
let bytes = storage
.get_object(location.bucket_id, location.file_name)
.build()
.await?
.send()
.await?
.get_bytes()
.await?;
Ok(bytes)
})
}
}
#[allow(dead_code)]
struct ObjectEncryption {
encryption: Weak<dyn AppFlowyEncryption>,
@ -154,11 +176,13 @@ impl ObjectEncryption {
}
}
#[allow(dead_code)]
struct ObjectLocation<'a> {
bucket_id: &'a str,
file_name: &'a str,
}
#[allow(dead_code)]
fn get_object_location_from(url: &Url) -> Result<ObjectLocation, Error> {
let mut segments = url
.path_segments()

View File

@ -1,7 +1,9 @@
use bytes::Bytes;
use serde::{Deserialize, Serialize};
use flowy_storage::ObjectValue;
use flowy_storage::ObjectValueSupabase;
use crate::supabase;
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@ -88,12 +90,19 @@ pub enum RequestBody {
},
}
impl From<(FileOptions, ObjectValue)> for RequestBody {
fn from(params: (FileOptions, ObjectValue)) -> Self {
impl From<(FileOptions, ObjectValueSupabase)> for RequestBody {
fn from(
params: (
supabase::file_storage::entities::FileOptions,
ObjectValueSupabase,
),
) -> Self {
let (options, value) = params;
match value {
ObjectValue::File { file_path } => RequestBody::MultiPartFile { file_path, options },
ObjectValue::Bytes { bytes, mime: _ } => RequestBody::MultiPartBytes { bytes, options },
ObjectValueSupabase::File { file_path } => RequestBody::MultiPartFile { file_path, options },
ObjectValueSupabase::Bytes { bytes, mime: _ } => {
RequestBody::MultiPartBytes { bytes, options }
},
}
}
}

View File

@ -1,3 +1,4 @@
use flowy_storage::ObjectStorageService;
use std::collections::HashMap;
use std::sync::{Arc, Weak};
@ -9,7 +10,6 @@ use flowy_database_pub::cloud::DatabaseCloudService;
use flowy_document_pub::cloud::DocumentCloudService;
use flowy_folder_pub::cloud::FolderCloudService;
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_storage::FileStorageService;
use flowy_user_pub::cloud::UserCloudService;
use crate::supabase::api::{
@ -187,11 +187,11 @@ impl AppFlowyServer for SupabaseServer {
)))
}
fn file_storage(&self) -> Option<Arc<dyn FileStorageService>> {
fn file_storage(&self) -> Option<Arc<dyn ObjectStorageService>> {
self
.file_storage
.read()
.clone()
.map(|s| s as Arc<dyn FileStorageService>)
.map(|s| s as Arc<dyn ObjectStorageService>)
}
}

View File

@ -1,78 +1,78 @@
use url::Url;
use uuid::Uuid;
use flowy_storage::StorageObject;
use crate::supabase_test::util::{file_storage_service, get_supabase_ci_config};
#[tokio::test]
async fn supabase_get_object_test() {
if get_supabase_ci_config().is_none() {
return;
}
let service = file_storage_service();
let file_name = format!("test-{}.txt", Uuid::new_v4());
let object = StorageObject::from_file("1", &file_name, "tests/test.txt");
// Upload a file
let url = service
.create_object(object)
.await
.unwrap()
.parse::<Url>()
.unwrap();
// The url would be something like:
// https://acfrqdbdtbsceyjbxsfc.supabase.co/storage/v1/object/data/test-1693472809.txt
let name = url.path_segments().unwrap().last().unwrap();
assert_eq!(name, &file_name);
// Download the file
let bytes = service.get_object_by_url(url.to_string()).await.unwrap();
let s = String::from_utf8(bytes.to_vec()).unwrap();
assert_eq!(s, "hello world");
}
#[tokio::test]
async fn supabase_upload_image_test() {
if get_supabase_ci_config().is_none() {
return;
}
let service = file_storage_service();
let file_name = format!("image-{}.png", Uuid::new_v4());
let object = StorageObject::from_file("1", &file_name, "tests/logo.png");
// Upload a file
let url = service
.create_object(object)
.await
.unwrap()
.parse::<Url>()
.unwrap();
// Download object by url
let bytes = service.get_object_by_url(url.to_string()).await.unwrap();
assert_eq!(bytes.len(), 15694);
}
#[tokio::test]
async fn supabase_delete_object_test() {
if get_supabase_ci_config().is_none() {
return;
}
let service = file_storage_service();
let file_name = format!("test-{}.txt", Uuid::new_v4());
let object = StorageObject::from_file("1", &file_name, "tests/test.txt");
let url = service.create_object(object).await.unwrap();
let result = service.get_object_by_url(url.clone()).await;
assert!(result.is_ok());
let _ = service.delete_object_by_url(url.clone()).await;
let result = service.get_object_by_url(url.clone()).await;
assert!(result.is_err());
}
// use url::Url;
// use uuid::Uuid;
//
// use flowy_storage::StorageObject;
//
// use crate::supabase_test::util::{file_storage_service, get_supabase_ci_config};
//
// #[tokio::test]
// async fn supabase_get_object_test() {
// if get_supabase_ci_config().is_none() {
// return;
// }
//
// let service = file_storage_service();
// let file_name = format!("test-{}.txt", Uuid::new_v4());
// let object = StorageObject::from_file("1", &file_name, "tests/test.txt");
//
// // Upload a file
// let url = service
// .create_object(object)
// .await
// .unwrap()
// .parse::<Url>()
// .unwrap();
//
// // The url would be something like:
// // https://acfrqdbdtbsceyjbxsfc.supabase.co/storage/v1/object/data/test-1693472809.txt
// let name = url.path_segments().unwrap().last().unwrap();
// assert_eq!(name, &file_name);
//
// // Download the file
// let bytes = service.get_object(url.to_string()).await.unwrap();
// let s = String::from_utf8(bytes.to_vec()).unwrap();
// assert_eq!(s, "hello world");
// }
//
// #[tokio::test]
// async fn supabase_upload_image_test() {
// if get_supabase_ci_config().is_none() {
// return;
// }
//
// let service = file_storage_service();
// let file_name = format!("image-{}.png", Uuid::new_v4());
// let object = StorageObject::from_file("1", &file_name, "tests/logo.png");
//
// // Upload a file
// let url = service
// .create_object(object)
// .await
// .unwrap()
// .parse::<Url>()
// .unwrap();
//
// // Download object by url
// let bytes = service.get_object(url.to_string()).await.unwrap();
// assert_eq!(bytes.len(), 15694);
// }
//
// #[tokio::test]
// async fn supabase_delete_object_test() {
// if get_supabase_ci_config().is_none() {
// return;
// }
//
// let service = file_storage_service();
// let file_name = format!("test-{}.txt", Uuid::new_v4());
// let object = StorageObject::from_file("1", &file_name, "tests/test.txt");
// let url = service.create_object(object).await.unwrap();
//
// let result = service.get_object(url.clone()).await;
// assert!(result.is_ok());
//
// let _ = service.delete_object(url.clone()).await;
//
// let result = service.get_object(url.clone()).await;
// assert!(result.is_err());
// }

View File

@ -1,3 +1,4 @@
use flowy_storage::ObjectStorageService;
use std::collections::HashMap;
use std::sync::Arc;
@ -17,7 +18,7 @@ use flowy_server::supabase::define::{USER_DEVICE_ID, USER_EMAIL, USER_UUID};
use flowy_server::supabase::file_storage::core::SupabaseFileStorage;
use flowy_server::{AppFlowyEncryption, EncryptionImpl};
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_storage::{FileStoragePlan, FileStorageService, StorageObject};
use flowy_storage::{FileStoragePlan, StorageObject};
use flowy_user_pub::cloud::UserCloudService;
use lib_infra::future::FutureResult;
@ -60,7 +61,8 @@ pub fn folder_service() -> Arc<dyn FolderCloudService> {
Arc::new(SupabaseFolderServiceImpl::new(server))
}
pub fn file_storage_service() -> Arc<dyn FileStorageService> {
#[allow(dead_code)]
pub fn file_storage_service() -> Arc<dyn ObjectStorageService> {
let encryption_impl: Arc<dyn AppFlowyEncryption> = Arc::new(EncryptionImpl::new(None));
let config = SupabaseConfiguration::from_env().unwrap();
Arc::new(

View File

@ -15,3 +15,6 @@ mime_guess = "2.0"
lib-infra = { workspace = true }
url = "2.2.2"
flowy-error = { workspace = true, features = ["impl_from_reqwest"] }
mime = "0.3.17"
tokio.workspace = true
tracing.workspace = true

View File

@ -2,11 +2,108 @@ use bytes::Bytes;
use flowy_error::FlowyError;
use lib_infra::future::FutureResult;
use mime::Mime;
use tokio::io::AsyncReadExt;
use tracing::info;
pub struct ObjectIdentity {
pub workspace_id: String,
pub file_id: String,
}
#[derive(Clone)]
pub struct ObjectValue {
pub raw: Bytes,
pub mime: Mime,
}
impl ObjectValue {
pub async fn from_file(local_file_path: &str) -> Result<Self, FlowyError> {
let mut file = tokio::fs::File::open(local_file_path).await?;
let mut content = Vec::new();
let n = file.read_to_end(&mut content).await?;
info!("read {} bytes from file: {}", n, local_file_path);
let mime = mime_guess::from_path(local_file_path).first_or_octet_stream();
Ok(ObjectValue {
raw: content.into(),
mime,
})
}
}
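
A hedged usage sketch of `ObjectValue::from_file`: it reads the whole file into memory and guesses the MIME type from the extension. The path and runtime setup are illustrative; it assumes the flowy-storage crate above and a tokio runtime.

```rust
use flowy_storage::ObjectValue;

#[tokio::main]
async fn main() {
  // The path is illustrative; from_file loads the whole file into memory and
  // guesses the MIME type from the file extension.
  let value = ObjectValue::from_file("./tests/asset/logo.png")
    .await
    .expect("failed to read file");
  println!("{} bytes, mime = {}", value.raw.len(), value.mime);
}
```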
/// Provides a service for object storage.
///
/// The trait includes methods for CRUD operations on storage objects.
pub trait ObjectStorageService: Send + Sync + 'static {
/// Returns the storage URL for the object identified by `object_id`.
///
/// # Parameters
/// - `object_id`: the workspace id and file id that identify the object.
///
/// # Returns
/// - `Ok(String)`: the URL under which the object is (or will be) stored.
/// - `Err(Error)`: An error occurred during the operation.
fn get_object_url(&self, object_id: ObjectIdentity) -> FutureResult<String, FlowyError>;
/// Uploads an object to the given URL.
///
/// # Parameters
/// - `url`: url of the object to be stored.
/// - `object_value`: the raw bytes and MIME type of the object.
///
/// # Returns
/// - `Ok()`
/// - `Err(Error)`: An error occurred during the operation.
fn put_object(&self, url: String, object_value: ObjectValue) -> FutureResult<(), FlowyError>;
/// Deletes a storage object by its URL.
///
/// # Parameters
/// - `url`: url of the object to be deleted.
///
/// # Returns
/// - `Ok()`
/// - `Err(Error)`: An error occurred during the operation.
fn delete_object(&self, url: String) -> FutureResult<(), FlowyError>;
/// Fetches a storage object by its URL.
///
/// # Parameters
/// - `url`: url of the object
///
/// # Returns
/// - `Ok(File)`: The returned file object.
/// - `Err(Error)`: An error occurred during the operation.
fn get_object(&self, url: String) -> FutureResult<ObjectValue, FlowyError>;
}
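
To make the contract concrete, here is a hedged in-memory implementation sketch, in the spirit of the `DocumentTestFileStorageService` test double earlier in this commit. It assumes the flowy-storage, flowy-error, and lib-infra crates from this repository; the `mem://` URL scheme and struct name are invented for illustration.

```rust
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

use flowy_error::FlowyError;
use flowy_storage::{ObjectIdentity, ObjectStorageService, ObjectValue};
use lib_infra::future::FutureResult;

/// Stores objects in a process-local map keyed by their URL.
#[derive(Default)]
struct InMemoryStorage {
  objects: Arc<Mutex<HashMap<String, ObjectValue>>>,
}

impl ObjectStorageService for InMemoryStorage {
  fn get_object_url(&self, object_id: ObjectIdentity) -> FutureResult<String, FlowyError> {
    // Derive a deterministic URL from the identity; `mem://` is made up.
    let url = format!("mem://{}/{}", object_id.workspace_id, object_id.file_id);
    FutureResult::new(async move { Ok(url) })
  }

  fn put_object(&self, url: String, object_value: ObjectValue) -> FutureResult<(), FlowyError> {
    let objects = self.objects.clone();
    FutureResult::new(async move {
      objects.lock().unwrap().insert(url, object_value);
      Ok(())
    })
  }

  fn delete_object(&self, url: String) -> FutureResult<(), FlowyError> {
    let objects = self.objects.clone();
    FutureResult::new(async move {
      objects.lock().unwrap().remove(&url);
      Ok(())
    })
  }

  fn get_object(&self, url: String) -> FutureResult<ObjectValue, FlowyError> {
    let objects = self.objects.clone();
    FutureResult::new(async move {
      objects
        .lock()
        .unwrap()
        .get(&url)
        .cloned()
        .ok_or_else(|| FlowyError::internal().with_context("object not found"))
    })
  }
}
```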
pub trait FileStoragePlan: Send + Sync + 'static {
fn storage_size(&self) -> FutureResult<u64, FlowyError>;
fn maximum_file_size(&self) -> FutureResult<u64, FlowyError>;
fn check_upload_object(&self, object: &StorageObject) -> FutureResult<(), FlowyError>;
}
pub struct StorageObject {
pub workspace_id: String,
pub file_name: String,
pub value: ObjectValue,
pub value: ObjectValueSupabase,
}
pub enum ObjectValueSupabase {
File { file_path: String },
Bytes { bytes: Bytes, mime: String },
}
impl ObjectValueSupabase {
pub fn mime_type(&self) -> String {
match self {
ObjectValueSupabase::File { file_path } => mime_guess::from_path(file_path)
.first_or_octet_stream()
.to_string(),
ObjectValueSupabase::Bytes { mime, .. } => mime.clone(),
}
}
}
impl StorageObject {
@ -21,7 +118,7 @@ impl StorageObject {
Self {
workspace_id: workspace_id.to_string(),
file_name: file_name.to_string(),
value: ObjectValue::File {
value: ObjectValueSupabase::File {
file_path: file_path.to_string(),
},
}
@ -45,7 +142,7 @@ impl StorageObject {
Self {
workspace_id: workspace_id.to_string(),
file_name: file_name.to_string(),
value: ObjectValue::Bytes { bytes, mime },
value: ObjectValueSupabase::Bytes { bytes, mime },
}
}
@ -56,60 +153,8 @@ impl StorageObject {
/// The file size in bytes.
pub fn file_size(&self) -> u64 {
match &self.value {
ObjectValue::File { file_path } => std::fs::metadata(file_path).unwrap().len(),
ObjectValue::Bytes { bytes, .. } => bytes.len() as u64,
ObjectValueSupabase::File { file_path } => std::fs::metadata(file_path).unwrap().len(),
ObjectValueSupabase::Bytes { bytes, .. } => bytes.len() as u64,
}
}
}
pub enum ObjectValue {
File { file_path: String },
Bytes { bytes: Bytes, mime: String },
}
impl ObjectValue {
pub fn mime_type(&self) -> String {
match self {
ObjectValue::File { file_path } => mime_guess::from_path(file_path)
.first_or_octet_stream()
.to_string(),
ObjectValue::Bytes { mime, .. } => mime.clone(),
}
}
}
/// Provides a service for storing and managing files.
///
/// The trait includes methods for CRUD operations on storage objects.
pub trait FileStorageService: Send + Sync + 'static {
/// Creates a new storage object.
///
/// # Parameters
/// - `object`: The object to be stored.
///
/// # Returns
/// - `Ok(String)`: A url representing some kind of object identifier.
/// - `Err(Error)`: An error occurred during the operation.
fn create_object(&self, object: StorageObject) -> FutureResult<String, FlowyError>;
/// Deletes a storage object by its URL.
///
/// # Parameters
/// - `object_url`: The URL of the object to be deleted.
///
fn delete_object_by_url(&self, object_url: String) -> FutureResult<(), FlowyError>;
/// Fetches a storage object by its URL.
///
/// # Parameters
/// - `object_url`: The URL of the object to be fetched.
///
fn get_object_by_url(&self, object_url: String) -> FutureResult<Bytes, FlowyError>;
}
pub trait FileStoragePlan: Send + Sync + 'static {
fn storage_size(&self) -> FutureResult<u64, FlowyError>;
fn maximum_file_size(&self) -> FutureResult<u64, FlowyError>;
fn check_upload_object(&self, object: &StorageObject) -> FutureResult<(), FlowyError>;
}

View File

@ -1,3 +1,4 @@
use std::path::Path;
use validator::ValidationError;
pub fn required_not_empty_str(s: &str) -> Result<(), ValidationError> {
@ -6,3 +7,11 @@ pub fn required_not_empty_str(s: &str) -> Result<(), ValidationError> {
}
Ok(())
}
pub fn required_valid_path(s: &str) -> Result<(), ValidationError> {
let path = Path::new(s);
match (path.is_absolute(), path.exists()) {
(true, true) => Ok(()),
(_, _) => Err(ValidationError::new("invalid_path")),
}
}
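
A hedged usage sketch showing how `required_valid_path` plugs into a validator-derived struct, mirroring `UploadFileParamsPB` above. The struct is illustrative, and the function body is copied from the definition above so the example depends only on the `validator` crate.

```rust
use std::path::Path;
use validator::{Validate, ValidationError};

// Copied from the definition above so the sketch is self-contained.
pub fn required_valid_path(s: &str) -> Result<(), ValidationError> {
  let path = Path::new(s);
  match (path.is_absolute(), path.exists()) {
    (true, true) => Ok(()),
    (_, _) => Err(ValidationError::new("invalid_path")),
  }
}

#[derive(Validate)]
struct UploadRequest {
  #[validate(custom = "required_valid_path")]
  local_file_path: String,
}

fn main() {
  let bad = UploadRequest {
    local_file_path: "relative/path.png".into(),
  };
  // Relative or missing paths fail validation with the "invalid_path" code.
  assert!(bad.validate().is_err());
}
```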