clippy fixes in tests
commit 468cfe84e7 (parent d345a8f8f0)
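Most of these changes follow the pattern of clippy's `needless_borrow` lint: the argument is already a reference (or coerces to one), so the extra `&` only adds a borrow that the compiler immediately dereferences again. A minimal sketch of the pattern, using hypothetical names rather than code from this repository:

// clippy::needless_borrow in a nutshell (hypothetical example).
fn describe(tool: &str) -> String {
    format!("tool: {}", tool)
}

fn main() {
    let tool: &str = "sword";
    // Before: `&tool` is a `&&str` that deref-coerces back to `&str`.
    let _verbose = describe(&tool);
    // After: pass the reference that is already the right type.
    let concise = describe(tool);
    println!("{}", concise);
}

The `Arc::clone(&runtime)` hunk below appears to be the same idea: `runtime` is already an `&Arc<Runtime>`, so the extra borrow is redundant.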
@@ -52,7 +52,7 @@ pub struct BotClient {
 }
 
 pub fn make_client(runtime: &Arc<Runtime>, server: &str) -> Client {
-    let runtime_clone = Arc::clone(&runtime);
+    let runtime_clone = Arc::clone(runtime);
     let addr = ConnectionArgs::Tcp {
         prefer_ipv6: false,
         hostname: server.to_owned(),
@@ -125,7 +125,7 @@ fn weapon_stats() -> Result<(), Box<dyn Error>> {
         let buff_strength = tool.base_buff_strength(&msm, &[]).to_string();
         let equip_time = tool.equip_time(&msm, &[]).as_secs_f32().to_string();
         let kind = get_tool_kind(&tool.kind);
-        let hands = get_tool_hands(&tool);
+        let hands = get_tool_hands(tool);
 
         wtr.write_record(&[
             item.item_definition_id(),
@@ -94,7 +94,7 @@ impl ItemSpec {
     /// 1) If weights are invalid
     pub fn validate(&self, key: EquipSlot) {
         match self {
-            ItemSpec::Item(specifier) => std::mem::drop(Item::new_from_asset_expect(&specifier)),
+            ItemSpec::Item(specifier) => std::mem::drop(Item::new_from_asset_expect(specifier)),
             ItemSpec::Choice(items) => {
                 for (p, entry) in items {
                     if p <= &0.0 {
@@ -119,7 +119,7 @@ mod tests {
         for (_, item) in table.iter() {
             match item {
                 LootSpec::Item(item) => {
-                    Item::new_from_asset_expect(&item);
+                    Item::new_from_asset_expect(item);
                 },
                 LootSpec::ItemQuantity(item, lower, upper) => {
                     assert!(
@@ -134,11 +134,11 @@ mod tests {
                         upper,
                         lower
                     );
-                    Item::new_from_asset_expect(&item);
+                    Item::new_from_asset_expect(item);
                 },
                 LootSpec::LootTable(loot_table) => {
                     let loot_table =
-                        Lottery::<LootSpec<String>>::load_expect_cloned(&loot_table);
+                        Lottery::<LootSpec<String>>::load_expect_cloned(loot_table);
                     validate_table_contents(loot_table);
                 },
             }
@@ -115,7 +115,7 @@ fn read_file_from_path<'a>(
                 reference.name()
             )
         })
-        .to_object(&repo)
+        .to_object(repo)
        .unwrap()
        .peel_to_blob()
        .expect("Impossible to fetch the Git object")
@@ -230,7 +230,7 @@ fn complete_key_versions<'a>(
 
            let full_path = i18n_file.path();
            let path = full_path.strip_prefix(root_dir).unwrap();
-            let i18n_blob = read_file_from_path(&repo, &head_ref, &path);
+            let i18n_blob = read_file_from_path(repo, head_ref, path);
            let i18n: LocalizationFragment =
                from_bytes(i18n_blob.content()).unwrap_or_else(|e| {
                    panic!(
@@ -239,7 +239,7 @@ fn complete_key_versions<'a>(
                        e
                    )
                });
-            i18n_key_versions.extend(generate_key_version(&repo, &i18n, &path, &i18n_blob));
+            i18n_key_versions.extend(generate_key_version(repo, &i18n, path, &i18n_blob));
        } else if file_type.is_dir() {
            // If it's a directory, recursively check it
            complete_key_versions(
@@ -303,7 +303,7 @@ fn test_localization_directory(
    println!("-----------------------------------");
 
    // Find the localization entry state
-    let current_blob = read_file_from_path(&repo, &head_ref, &relfile);
+    let current_blob = read_file_from_path(repo, head_ref, &relfile);
    let current_loc: RawLocalization = from_bytes(current_blob.content()).unwrap_or_else(|e| {
        panic!(
            "Could not parse {} RON file, skipping: {}",
@@ -324,12 +324,12 @@ fn test_localization_directory(
    );
 
    // Comparing with reference localization
-    fill_info(&mut current_i18n, &i18n_references, repo, &relfile);
+    fill_info(&mut current_i18n, i18n_references, repo, &relfile);
 
    let mut state_map = LocalizationAnalysis::default();
    let result = gather_results(current_i18n, &mut state_map);
    print_translation_stats(
-        &i18n_references,
+        i18n_references,
        &result,
        &mut state_map,
        be_verbose,
@@ -420,7 +420,7 @@ mod tests {
        // Generate paths
        let i18n_asset_path = std::path::Path::new("assets/voxygen/i18n/");
        let root_dir = assets::find_root().expect("Failed to discover repository root");
-        crate::verification::verify_all_localizations(&root_dir, &i18n_asset_path);
+        crate::verification::verify_all_localizations(&root_dir, i18n_asset_path);
    }
 
    // Test to verify all languages and print missing and faulty localisation
@@ -432,6 +432,6 @@ mod tests {
        // Generate paths
        let i18n_asset_path = std::path::Path::new("assets/voxygen/i18n/");
        let root_dir = assets::find_root().expect("Failed to discover repository root");
-        crate::analysis::test_all_localizations(&root_dir, &i18n_asset_path, be_verbose);
+        crate::analysis::test_all_localizations(&root_dir, i18n_asset_path, be_verbose);
    }
 }
@@ -35,7 +35,7 @@ use veloren_world::{
 fn lz4_with_dictionary(data: &[u8], dictionary: &[u8]) -> Vec<u8> {
    let mut compressed = Vec::new();
    lz_fear::CompressionSettings::default()
-        .dictionary(0, &dictionary)
+        .dictionary(0, dictionary)
        .compress(data, &mut compressed)
        .unwrap();
    compressed
@@ -53,8 +53,7 @@ fn main() -> Result {
            for z in aabb.min.z..aabb.max.z {
                let pos = Vec3::new(x, y, z);
 
-                if let Some(block) = fill.sample_at(&prim_tree, prim, pos, &canvas)
-                {
+                if let Some(block) = fill.sample_at(&prim_tree, prim, pos, canvas) {
                    let _ = volume.set(pos, block);
                }
            }
@@ -152,7 +151,7 @@ impl ExportVol {
        })?;
        write_chunk(file, "XYZI", &|file| {
            write_i32(file, model.len() as i32 / 4)?; // Number of voxels
-            file.write_all(&model)
+            file.write_all(model)
        })?;
    }