chore: support Rust 1.75 (#4246)

* chore: fix warnings

* chore: update rust version
This commit is contained in:
Nathan.fooo 2023-12-30 01:22:06 +08:00 committed by GitHub
parent 206ccb2c43
commit 50694bb589
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
31 changed files with 42 additions and 64 deletions

View File

@ -4,7 +4,7 @@ on: workflow_dispatch
env:
FLUTTER_VERSION: "3.18.0-0.2.pre"
RUST_TOOLCHAIN: "1.70"
RUST_TOOLCHAIN: "1.75"
jobs:
build:

View File

@ -19,7 +19,7 @@ on:
env:
FLUTTER_VERSION: "3.18.0-0.2.pre"
RUST_TOOLCHAIN: "1.70"
RUST_TOOLCHAIN: "1.75"
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}

View File

@ -7,7 +7,7 @@ on:
env:
FLUTTER_VERSION: "3.18.0-0.2.pre"
RUST_TOOLCHAIN: "1.70"
RUST_TOOLCHAIN: "1.75"
jobs:
create-release:

View File

@ -21,7 +21,7 @@ on:
env:
CARGO_TERM_COLOR: always
RUST_TOOLCHAIN: "1.70"
RUST_TOOLCHAIN: "1.75"
jobs:
test-on-ubuntu:
@ -101,5 +101,5 @@ jobs:
working-directory: frontend/rust-lib/
- name: clippy rust-lib
run: cargo clippy --all
run: cargo clippy --all-targets -- -D warnings
working-directory: frontend/rust-lib

View File

@ -12,7 +12,7 @@ on:
env:
CARGO_TERM_COLOR: always
FLUTTER_VERSION: "3.18.0-0.2.pre"
RUST_TOOLCHAIN: "1.70"
RUST_TOOLCHAIN: "1.75"
jobs:
tests:

View File

@ -9,7 +9,7 @@ on:
env:
NODE_VERSION: "18.16.0"
PNPM_VERSION: "8.5.0"
RUST_TOOLCHAIN: "1.70"
RUST_TOOLCHAIN: "1.75"
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}

View File

@ -1,2 +1,2 @@
[toolchain]
channel = "1.70"
channel = "1.75"

View File

@ -32,7 +32,7 @@ pub fn parse_document_data(document: OpenDocumentData) -> ParseDocumentData {
let page_block = blocks.get(&page_id).unwrap();
let children_id = page_block.children_id.clone();
let children = children_map.get(&children_id).unwrap();
let block_id = children.children.get(0).unwrap().to_string();
let block_id = children.children.first().unwrap().to_string();
ParseDocumentData {
doc_id,
page_id,

View File

@ -1,6 +1,6 @@
pub(crate) fn invalid_email_test_case() -> Vec<String> {
// https://gist.github.com/cjaoude/fd9910626629b53c4d25
vec![
[
"annie@",
"annie@gmail@",
"#@%^%#$@#$@#.com",
@ -11,14 +11,6 @@ pub(crate) fn invalid_email_test_case() -> Vec<String> {
"email@-example.com",
"email@example..com",
"あいうえお@example.com",
/* The following email is valid according to the validate_email function return
* ".email@example.com",
* "email.@example.com",
* "email..email@example.com",
* "email@example",
* "email@example.web",
* "email@111.222.333.44444",
* "Abc..123@example.com", */
]
.iter()
.map(|s| s.to_string())
@ -26,7 +18,7 @@ pub(crate) fn invalid_email_test_case() -> Vec<String> {
}
pub(crate) fn invalid_password_test_case() -> Vec<String> {
vec!["123456", "1234".repeat(100).as_str()]
["123456", "1234".repeat(100).as_str()]
.iter()
.map(|s| s.to_string())
.collect::<Vec<_>>()

View File

@ -21,7 +21,6 @@ use flowy_user_deps::cloud::UserCloudServiceProvider;
use lib_dispatch::prelude::*;
use lib_dispatch::runtime::AFPluginRuntime;
use module::make_plugins;
pub use module::*;
use crate::config::AppFlowyCoreConfig;
use crate::deps_resolve::collab_backup::RocksdbBackupImpl;

View File

@ -1304,12 +1304,7 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl {
if let (Some(database), Some(editor_by_view_id)) =
(weak_database.upgrade(), weak_editor_by_view_id.upgrade())
{
let view_editors = editor_by_view_id
.read()
.await
.values()
.map(|editor| editor.clone())
.collect();
let view_editors = editor_by_view_id.read().await.values().cloned().collect();
let _ =
update_field_type_option_fn(&database, &view_editors, type_option_data, old_field).await;
}

View File

@ -7,7 +7,6 @@ mod select_type_option;
mod single_select_type_option;
mod type_option_transform;
pub use checklist_filter::*;
pub use multi_select_type_option::*;
pub use select_ids::*;
pub use select_option::*;

View File

@ -280,6 +280,6 @@ pub fn default_type_option_data_from_type(field_type: &FieldType) -> TypeOptionD
FieldType::MultiSelect => MultiSelectTypeOption::default().into(),
FieldType::Checkbox => CheckboxTypeOption::default().into(),
FieldType::URL => URLTypeOption::default().into(),
FieldType::Checklist => ChecklistTypeOption::default().into(),
FieldType::Checklist => ChecklistTypeOption.into(),
}
}

View File

@ -369,9 +369,7 @@ fn filter_row(
cell_filter_cache: &CellFilterCache,
) -> Option<(RowId, bool)> {
// Create a filter result cache if it's not exist
let mut filter_result = result_by_row_id
.entry(row.id.clone())
.or_insert_with(FilterResult::default);
let mut filter_result = result_by_row_id.entry(row.id.clone()).or_default();
let old_is_visible = filter_result.is_visible();
// Iterate each cell of the row to check its visibility

View File

@ -115,14 +115,14 @@ where
let no_status_group_rows = other_group_changesets
.iter()
.flat_map(|changeset| &changeset.deleted_rows)
.cloned()
.filter(|row_id| {
.filter(|&row_id| {
// if the [other_group_inserted_row] contains the row_id of the row
// which means the row should not move to the default group.
!other_group_inserted_row
.iter()
.any(|inserted_row| &inserted_row.row_meta.id == row_id)
})
.cloned()
.collect::<Vec<String>>();
let mut changeset = GroupRowsNotificationPB::new(no_status_group.id.clone());

View File

@ -223,10 +223,8 @@ impl GroupCustomize for DateGroupController {
deleted_group = Some(GroupPB::from(group.clone()));
}
}
if deleted_group.is_some() {
let _ = self
.context
.delete_group(&deleted_group.as_ref().unwrap().group_id);
if let Some(delete_group) = deleted_group.as_ref() {
let _ = self.context.delete_group(&delete_group.group_id);
}
deleted_group
}

View File

@ -179,10 +179,8 @@ impl GroupCustomize for URLGroupController {
deleted_group = Some(GroupPB::from(group.clone()));
}
}
if deleted_group.is_some() {
let _ = self
.context
.delete_group(&deleted_group.as_ref().unwrap().group_id);
if let Some(deleted_group) = deleted_group.as_ref() {
let _ = self.context.delete_group(&deleted_group.group_id);
}
deleted_group
}

View File

@ -30,9 +30,10 @@ impl TryFrom<SortMap> for Sort {
value.get_i64_value(FIELD_TYPE).map(FieldType::from),
) {
(Some(id), Some(field_id), Some(field_type)) => {
let condition =
SortCondition::try_from(value.get_i64_value(SORT_CONDITION).unwrap_or_default())
.unwrap_or_default();
let condition = value
.get_i64_value(SORT_CONDITION)
.map(SortCondition::from)
.unwrap_or_default();
Ok(Self {
id,
field_id,

View File

@ -19,7 +19,7 @@ async fn grid_cell_update() {
let rows = &test.row_details;
let mut scripts = vec![];
for (_, row_detail) in rows.iter().enumerate() {
for row_detail in rows.iter() {
for field in &fields {
let field_type = FieldType::from(field.field_type);
if field_type == FieldType::LastEditedTime || field_type == FieldType::CreatedTime {

View File

@ -195,7 +195,7 @@ async fn grid_switch_from_multi_select_to_text_test() {
from_field_type: FieldType::MultiSelect,
expected_content: format!(
"{},{}",
multi_select_type_option.get(0).unwrap().name,
multi_select_type_option.first().unwrap().name,
multi_select_type_option.get(1).unwrap().name
),
}];

View File

@ -196,7 +196,7 @@ async fn change_date_on_moving_row_to_another_group() {
let group = test.group_at_index(2).await;
let rows = group.clone().rows;
let row_id = &rows.get(0).unwrap().id;
let row_id = &rows.first().unwrap().id;
let row_detail = test
.get_rows()
.await

View File

@ -47,7 +47,7 @@ async fn group_move_row_test() {
AssertRow {
group_index: 1,
row_index: 1,
row: group.rows.get(0).unwrap().clone(),
row: group.rows.first().unwrap().clone(),
},
];
test.run_scripts(scripts).await;
@ -75,7 +75,7 @@ async fn group_move_row_to_other_group_test() {
AssertRow {
group_index: 2,
row_index: 1,
row: group.rows.get(0).unwrap().clone(),
row: group.rows.first().unwrap().clone(),
},
];
test.run_scripts(scripts).await;
@ -104,7 +104,7 @@ async fn group_move_two_row_to_other_group_test() {
AssertRow {
group_index: 2,
row_index: 1,
row: group_1.rows.get(0).unwrap().clone(),
row: group_1.rows.first().unwrap().clone(),
},
];
test.run_scripts(scripts).await;
@ -129,7 +129,7 @@ async fn group_move_two_row_to_other_group_test() {
AssertRow {
group_index: 2,
row_index: 1,
row: group_1.rows.get(0).unwrap().clone(),
row: group_1.rows.first().unwrap().clone(),
},
];
test.run_scripts(scripts).await;
@ -150,7 +150,7 @@ async fn group_move_row_to_other_group_and_reorder_from_up_to_down_test() {
AssertRow {
group_index: 2,
row_index: 1,
row: group_1.rows.get(0).unwrap().clone(),
row: group_1.rows.first().unwrap().clone(),
},
];
test.run_scripts(scripts).await;
@ -165,7 +165,7 @@ async fn group_move_row_to_other_group_and_reorder_from_up_to_down_test() {
AssertRow {
group_index: 2,
row_index: 2,
row: group_2.rows.get(0).unwrap().clone(),
row: group_2.rows.first().unwrap().clone(),
},
];
test.run_scripts(scripts).await;

View File

@ -118,7 +118,7 @@ pub fn make_test_board() -> DatabaseData {
// let option1 = SelectOption::with_color(FIRST_THING, SelectOptionColor::Purple);
// let option2 = SelectOption::with_color(SECOND_THING, SelectOptionColor::Orange);
// let option3 = SelectOption::with_color(THIRD_THING, SelectOptionColor::Yellow);
let type_option = ChecklistTypeOption::default();
let type_option = ChecklistTypeOption;
// type_option.options.extend(vec![option1, option2, option3]);
let checklist_field = FieldBuilder::new(field_type, type_option)
.name("TODO")

View File

@ -120,7 +120,7 @@ pub fn make_test_grid() -> DatabaseData {
// let option1 = SelectOption::with_color(FIRST_THING, SelectOptionColor::Purple);
// let option2 = SelectOption::with_color(SECOND_THING, SelectOptionColor::Orange);
// let option3 = SelectOption::with_color(THIRD_THING, SelectOptionColor::Yellow);
let type_option = ChecklistTypeOption::default();
let type_option = ChecklistTypeOption;
// type_option.options.extend(vec![option1, option2, option3]);
let checklist_field = FieldBuilder::new(field_type, type_option)
.name("TODO")

View File

@ -26,6 +26,7 @@ impl MutexDocument {
/// # Returns
/// * `Result<Document, FlowyError>` - a Result containing either a new Document object or an Error if the document creation failed
pub fn open(doc_id: &str, collab: Arc<MutexCollab>) -> FlowyResult<Self> {
#[allow(clippy::arc_with_non_send_sync)]
let document = Document::open(collab.clone()).map(|inner| Self(Arc::new(Mutex::new(inner))))?;
subscribe_document_changed(doc_id, &document);
subscribe_document_snapshot_state(&collab);
@ -41,6 +42,7 @@ impl MutexDocument {
/// # Returns
/// * `Result<Document, FlowyError>` - a Result containing either a new Document object or an Error if the document creation failed
pub fn create_with_data(collab: Arc<MutexCollab>, data: DocumentData) -> FlowyResult<Self> {
#[allow(clippy::arc_with_non_send_sync)]
let document =
Document::create_with_data(collab, data).map(|inner| Self(Arc::new(Mutex::new(inner))))?;
Ok(document)

View File

@ -26,7 +26,7 @@ fn convert_json_to_document() {
let page_block = blocks.get(&page_id).unwrap();
let page_children = children_map.get(&page_block.children_id).unwrap();
assert_eq!(page_children.children.len(), 1);
let paragraph1 = blocks.get(page_children.children.get(0).unwrap()).unwrap();
let paragraph1 = blocks.get(page_children.children.first().unwrap()).unwrap();
assert_eq!(paragraph1.ty, "paragraph1");
assert_eq!(paragraph1.parent_id, page_block.id);
}

View File

@ -71,7 +71,7 @@ async fn document_data_to_json_with_range_2_test() {
let start_block_id = children_map
.get(start_block_parent.children.as_str())
.unwrap()
.get(0)
.first()
.unwrap();
let start = Selection {
@ -89,7 +89,7 @@ async fn document_data_to_json_with_range_2_test() {
let end_block_children = children_map
.get(end_block_parent.children.as_str())
.unwrap();
let end_block_id = end_block_children.get(0).unwrap();
let end_block_id = end_block_children.first().unwrap();
let end = Selection {
block_id: end_block_id.to_string(),
index: 0,

View File

@ -1,3 +1 @@
mod trash_id;
pub use trash_id::*;

View File

@ -271,8 +271,6 @@ where
}
fn reset_workspace(&self, collab_object: CollabObject) -> FutureResult<(), Error> {
let collab_object = collab_object;
let try_get_postgrest = self.server.try_get_weak_postgrest();
let (tx, rx) = channel();
let init_update = default_workspace_doc_state(&collab_object);

View File

@ -8,4 +8,4 @@ mod pragma;
pub use database::*;
pub use pool::*;
pub use errors::{Error, ErrorKind, Result};
pub use errors::Error;

View File

@ -1,2 +1,2 @@
[toolchain]
channel = "1.70"
channel = "1.75"