clippy fixes in tests

Marcel Märtens 2021-07-12 13:14:50 +02:00
parent d345a8f8f0
commit 468cfe84e7
8 changed files with 17 additions and 18 deletions
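All of the changes below remove redundant borrows that clippy's needless_borrow lint reports in test and tool code: the value being passed is already a reference (or coerces to one), so taking an extra & only produces a double reference that rustc immediately auto-dereferences. A minimal sketch of the pattern, using a hypothetical Runtime stand-in rather than the actual Veloren types:

    use std::sync::Arc;

    // Hypothetical stand-in for the real tokio Runtime used in the first hunk.
    struct Runtime;

    fn make_client(runtime: &Arc<Runtime>) -> Arc<Runtime> {
        // runtime is already an &Arc<Runtime>; Arc::clone(&runtime) would build a
        // &&Arc<Runtime> that rustc auto-derefs right back, which is exactly what
        // clippy::needless_borrow flags. Passing the reference through is enough.
        Arc::clone(runtime)
    }

    fn main() {
        let rt = Arc::new(Runtime);
        let _client = make_client(&rt);
    }

The same reasoning applies to the &str, path, and slice arguments fixed in the hunks below.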

View File

@@ -52,7 +52,7 @@ pub struct BotClient {
 }
 pub fn make_client(runtime: &Arc<Runtime>, server: &str) -> Client {
-    let runtime_clone = Arc::clone(&runtime);
+    let runtime_clone = Arc::clone(runtime);
     let addr = ConnectionArgs::Tcp {
         prefer_ipv6: false,
         hostname: server.to_owned(),

View File

@@ -125,7 +125,7 @@ fn weapon_stats() -> Result<(), Box<dyn Error>> {
             let buff_strength = tool.base_buff_strength(&msm, &[]).to_string();
             let equip_time = tool.equip_time(&msm, &[]).as_secs_f32().to_string();
             let kind = get_tool_kind(&tool.kind);
-            let hands = get_tool_hands(&tool);
+            let hands = get_tool_hands(tool);
             wtr.write_record(&[
                 item.item_definition_id(),

View File

@@ -94,7 +94,7 @@ impl ItemSpec {
     /// 1) If weights are invalid
     pub fn validate(&self, key: EquipSlot) {
         match self {
-            ItemSpec::Item(specifier) => std::mem::drop(Item::new_from_asset_expect(&specifier)),
+            ItemSpec::Item(specifier) => std::mem::drop(Item::new_from_asset_expect(specifier)),
             ItemSpec::Choice(items) => {
                 for (p, entry) in items {
                     if p <= &0.0 {

View File

@@ -119,7 +119,7 @@ mod tests {
         for (_, item) in table.iter() {
             match item {
                 LootSpec::Item(item) => {
-                    Item::new_from_asset_expect(&item);
+                    Item::new_from_asset_expect(item);
                 },
                 LootSpec::ItemQuantity(item, lower, upper) => {
                     assert!(
@@ -134,11 +134,11 @@ mod tests {
                         upper,
                         lower
                     );
-                    Item::new_from_asset_expect(&item);
+                    Item::new_from_asset_expect(item);
                 },
                 LootSpec::LootTable(loot_table) => {
                     let loot_table =
-                        Lottery::<LootSpec<String>>::load_expect_cloned(&loot_table);
+                        Lottery::<LootSpec<String>>::load_expect_cloned(loot_table);
                     validate_table_contents(loot_table);
                 },
             }

View File

@@ -115,7 +115,7 @@ fn read_file_from_path<'a>(
                 reference.name()
             )
         })
-        .to_object(&repo)
+        .to_object(repo)
         .unwrap()
         .peel_to_blob()
         .expect("Impossible to fetch the Git object")
@@ -230,7 +230,7 @@ fn complete_key_versions<'a>(
             let full_path = i18n_file.path();
             let path = full_path.strip_prefix(root_dir).unwrap();
-            let i18n_blob = read_file_from_path(&repo, &head_ref, &path);
+            let i18n_blob = read_file_from_path(repo, head_ref, path);
             let i18n: LocalizationFragment =
                 from_bytes(i18n_blob.content()).unwrap_or_else(|e| {
                     panic!(
@@ -239,7 +239,7 @@ fn complete_key_versions<'a>(
                         e
                     )
                 });
-            i18n_key_versions.extend(generate_key_version(&repo, &i18n, &path, &i18n_blob));
+            i18n_key_versions.extend(generate_key_version(repo, &i18n, path, &i18n_blob));
         } else if file_type.is_dir() {
             // If it's a directory, recursively check it
             complete_key_versions(
@@ -303,7 +303,7 @@ fn test_localization_directory(
         println!("-----------------------------------");
         // Find the localization entry state
-        let current_blob = read_file_from_path(&repo, &head_ref, &relfile);
+        let current_blob = read_file_from_path(repo, head_ref, &relfile);
         let current_loc: RawLocalization = from_bytes(current_blob.content()).unwrap_or_else(|e| {
             panic!(
                 "Could not parse {} RON file, skipping: {}",
@@ -324,12 +324,12 @@ fn test_localization_directory(
         );
         // Comparing with reference localization
-        fill_info(&mut current_i18n, &i18n_references, repo, &relfile);
+        fill_info(&mut current_i18n, i18n_references, repo, &relfile);
         let mut state_map = LocalizationAnalysis::default();
         let result = gather_results(current_i18n, &mut state_map);
         print_translation_stats(
-            &i18n_references,
+            i18n_references,
             &result,
             &mut state_map,
             be_verbose,

View File

@@ -420,7 +420,7 @@ mod tests {
         // Generate paths
         let i18n_asset_path = std::path::Path::new("assets/voxygen/i18n/");
         let root_dir = assets::find_root().expect("Failed to discover repository root");
-        crate::verification::verify_all_localizations(&root_dir, &i18n_asset_path);
+        crate::verification::verify_all_localizations(&root_dir, i18n_asset_path);
     }

     // Test to verify all languages and print missing and faulty localisation
@@ -432,6 +432,6 @@ mod tests {
         // Generate paths
         let i18n_asset_path = std::path::Path::new("assets/voxygen/i18n/");
         let root_dir = assets::find_root().expect("Failed to discover repository root");
-        crate::analysis::test_all_localizations(&root_dir, &i18n_asset_path, be_verbose);
+        crate::analysis::test_all_localizations(&root_dir, i18n_asset_path, be_verbose);
     }
 }

View File

@@ -35,7 +35,7 @@ use veloren_world::{
 fn lz4_with_dictionary(data: &[u8], dictionary: &[u8]) -> Vec<u8> {
     let mut compressed = Vec::new();
     lz_fear::CompressionSettings::default()
-        .dictionary(0, &dictionary)
+        .dictionary(0, dictionary)
         .compress(data, &mut compressed)
         .unwrap();
     compressed

View File

@@ -53,8 +53,7 @@ fn main() -> Result {
             for z in aabb.min.z..aabb.max.z {
                 let pos = Vec3::new(x, y, z);
-                if let Some(block) = fill.sample_at(&prim_tree, prim, pos, &canvas)
-                {
+                if let Some(block) = fill.sample_at(&prim_tree, prim, pos, canvas) {
                     let _ = volume.set(pos, block);
                 }
             }
@@ -152,7 +151,7 @@ impl ExportVol {
         })?;
         write_chunk(file, "XYZI", &|file| {
             write_i32(file, model.len() as i32 / 4)?; // Number of voxels
-            file.write_all(&model)
+            file.write_all(model)
         })?;
     }