update toolchain to nightly-2021-03-22

Marcel Märtens 2021-03-12 12:10:42 +01:00
parent 759c423595
commit 6b23101fac
39 changed files with 146 additions and 158 deletions

View File

@@ -13,6 +13,6 @@ reorder_impl_items = true
 fn_single_line = true
 inline_attribute_width = 50
 match_block_trailing_comma = true
-merge_imports = true
+imports_granularity="Crate"
 overflow_delimited_expr = true
 use_field_init_shorthand = true

Cargo.lock generated
View File

@@ -1,5 +1,7 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
+version = 3
+
 [[package]]
 name = "ab_glyph"
 version = "0.2.10"

View File

@@ -1,6 +1,6 @@
 #![deny(unsafe_code)]
 #![deny(clippy::clone_on_ref_ptr)]
-#![feature(label_break_value, option_zip, str_split_once)]
+#![feature(label_break_value, option_zip)]
 
 pub mod addr;
 pub mod cmd;
@@ -1045,7 +1045,7 @@ impl Client {
     where
         C: Clone,
     {
-        Some(self.state.read_storage::<C>().get(self.entity()).cloned()?)
+        self.state.read_storage::<C>().get(self.entity()).cloned()
     }
 
     pub fn current_biome(&self) -> BiomeKind {
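Editor's note on the second hunk above: the expression inside `Some(..?)` is already an `Option`, so the `?` unwrap and the `Some` re-wrap cancel out and the newer clippy flags the pattern; returning the inner expression directly is equivalent. A minimal sketch of the same simplification using a hypothetical lookup rather than the actual `Client` API:

    use std::collections::HashMap;

    fn lookup(map: &HashMap<u32, String>, key: u32) -> Option<String> {
        // Before: Some(map.get(&key).cloned()?)  -- `?` plus `Some` is a no-op here.
        map.get(&key).cloned()
    }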

View File

@@ -133,18 +133,18 @@ impl ClientMsg {
     end of 2nd level Enums
 */
 
-impl Into<ClientMsg> for ClientType {
-    fn into(self) -> ClientMsg { ClientMsg::Type(self) }
+impl From<ClientType> for ClientMsg {
+    fn from(other: ClientType) -> ClientMsg { ClientMsg::Type(other) }
 }
 
-impl Into<ClientMsg> for ClientRegister {
-    fn into(self) -> ClientMsg { ClientMsg::Register(self) }
+impl From<ClientRegister> for ClientMsg {
+    fn from(other: ClientRegister) -> ClientMsg { ClientMsg::Register(other) }
 }
 
-impl Into<ClientMsg> for ClientGeneral {
-    fn into(self) -> ClientMsg { ClientMsg::General(self) }
+impl From<ClientGeneral> for ClientMsg {
+    fn from(other: ClientGeneral) -> ClientMsg { ClientMsg::General(other) }
 }
 
-impl Into<ClientMsg> for PingMsg {
-    fn into(self) -> ClientMsg { ClientMsg::Ping(self) }
+impl From<PingMsg> for ClientMsg {
+    fn from(other: PingMsg) -> ClientMsg { ClientMsg::Ping(other) }
 }
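Editor's note on the `Into`-to-`From` flips in this hunk (and in the server-side message hunk below): implementing `From` is the preferred direction because the standard library's blanket `impl<T, U> Into<U> for T where U: From<T>` still provides `.into()` at every call site, while implementing only `Into` derives nothing in return. A minimal sketch with hypothetical message types, not the real enums:

    struct Msg(u32);
    struct Ping;

    impl From<Ping> for Msg {
        fn from(_: Ping) -> Msg { Msg(0) }
    }

    fn main() {
        let _explicit: Msg = Msg::from(Ping);
        let _sugar: Msg = Ping.into(); // supplied by the std blanket impl
    }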

View File

@@ -268,22 +268,22 @@ impl From<comp::ChatMsg> for ServerGeneral {
     fn from(v: comp::ChatMsg) -> Self { ServerGeneral::ChatMsg(v) }
 }
 
-impl Into<ServerMsg> for ServerInfo {
-    fn into(self) -> ServerMsg { ServerMsg::Info(self) }
+impl From<ServerInfo> for ServerMsg {
+    fn from(o: ServerInfo) -> ServerMsg { ServerMsg::Info(o) }
 }
 
-impl Into<ServerMsg> for ServerInit {
-    fn into(self) -> ServerMsg { ServerMsg::Init(Box::new(self)) }
+impl From<ServerInit> for ServerMsg {
+    fn from(o: ServerInit) -> ServerMsg { ServerMsg::Init(Box::new(o)) }
 }
 
-impl Into<ServerMsg> for ServerRegisterAnswer {
-    fn into(self) -> ServerMsg { ServerMsg::RegisterAnswer(self) }
+impl From<ServerRegisterAnswer> for ServerMsg {
+    fn from(o: ServerRegisterAnswer) -> ServerMsg { ServerMsg::RegisterAnswer(o) }
 }
 
-impl Into<ServerMsg> for ServerGeneral {
-    fn into(self) -> ServerMsg { ServerMsg::General(self) }
+impl From<ServerGeneral> for ServerMsg {
+    fn from(o: ServerGeneral) -> ServerMsg { ServerMsg::General(o) }
 }
 
-impl Into<ServerMsg> for PingMsg {
-    fn into(self) -> ServerMsg { ServerMsg::Ping(self) }
+impl From<PingMsg> for ServerMsg {
+    fn from(o: PingMsg) -> ServerMsg { ServerMsg::Ping(o) }
 }

View File

@@ -211,19 +211,17 @@ lazy_static! {
 pub fn path_of(specifier: &str, ext: &str) -> PathBuf { ASSETS.source().path_of(specifier, ext) }
 
 fn get_dir_files(files: &mut Vec<String>, path: &Path, specifier: &str) -> io::Result<()> {
-    for entry in fs::read_dir(path)? {
-        if let Ok(entry) = entry {
-            let path = entry.path();
-            let maybe_stem = path.file_stem().and_then(|stem| stem.to_str());
-            if let Some(stem) = maybe_stem {
-                let specifier = format!("{}.{}", specifier, stem);
-                if path.is_dir() {
-                    get_dir_files(files, &path, &specifier)?;
-                } else {
-                    files.push(specifier);
-                }
+    for entry in (fs::read_dir(path)?).flatten() {
+        let path = entry.path();
+        let maybe_stem = path.file_stem().and_then(|stem| stem.to_str());
+        if let Some(stem) = maybe_stem {
+            let specifier = format!("{}.{}", specifier, stem);
+            if path.is_dir() {
+                get_dir_files(files, &path, &specifier)?;
+            } else {
+                files.push(specifier);
             }
         }
     }
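Editor's note: the `.flatten()` rewrite above works because `Result` (like `Option`) implements `IntoIterator`, so flattening an iterator of `io::Result<DirEntry>` yields only the `Ok` entries and silently drops the failed ones, replacing the old `if let Ok(entry) = entry` guard. A standalone sketch of the idiom with an assumed directory-listing helper, not the game's asset code:

    use std::{fs, io, path::Path};

    fn file_names(dir: &Path) -> io::Result<Vec<String>> {
        let mut names = Vec::new();
        // Err entries from read_dir are skipped by flatten(), just like the former guard.
        for entry in fs::read_dir(dir)?.flatten() {
            if let Some(name) = entry.file_name().to_str() {
                names.push(name.to_owned());
            }
        }
        Ok(names)
    }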

View File

@@ -289,8 +289,8 @@ impl SpeechBubble {
         let timeout = Instant::now() + Duration::from_secs_f64(SpeechBubble::DEFAULT_DURATION);
         Self {
             message,
-            timeout,
             icon,
+            timeout,
         }
     }
@@ -299,8 +299,8 @@ impl SpeechBubble {
         let timeout = Instant::now() + Duration::from_secs_f64(SpeechBubble::DEFAULT_DURATION);
         Self {
             message,
-            timeout,
             icon,
+            timeout,
         }
     }
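Editor's note: this field swap, and the similar ones in the recipe, metrics, character-loader, ambient-channel, renderer, and blocks-of-interest hunks further down, appears to be about the newer clippy wanting struct literals to list fields in the same order as the struct declaration (the `inconsistent_struct_constructor` lint, as far as I can tell); the constructed value is identical either way. A tiny sketch with a hypothetical struct:

    struct Bubble {
        message: String,
        icon: u8,
        timeout: u64,
    }

    fn make(message: String, icon: u8, timeout: u64) -> Bubble {
        // Fields listed in declaration order keep the lint quiet; behaviour is unchanged.
        Bubble { message, icon, timeout }
    }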

View File

@@ -261,7 +261,7 @@ fn make_tagexample_def(
         modkind.identifier_name(),
         toolkind.identifier_name(),
     );
-    let tag = ModularComponentTag { modkind, toolkind };
+    let tag = ModularComponentTag { toolkind, modkind };
     // TODO: i18n
     let name = format!("Any {}", tag.name());
     let description = format!(
@@ -293,7 +293,7 @@ fn initialize_modular_assets() -> (HashMap<String, RawItemDef>, RawRecipeBook) {
     for &modkind in &MODKINDS {
         for tier in 0..=5 {
             let (identifier, item) = make_component_def(toolkind, modkind, tier);
-            let tag = ModularComponentTag { modkind, toolkind };
+            let tag = ModularComponentTag { toolkind, modkind };
             exemplars
                 .entry(tag)
                 .or_insert_with(Vec::new)

View File

@@ -285,8 +285,9 @@ impl From<SerdeOri> for Ori {
         }
     }
 }
-impl Into<SerdeOri> for Ori {
-    fn into(self) -> SerdeOri { SerdeOri(self.to_quat()) }
+
+impl From<Ori> for SerdeOri {
+    fn from(other: Ori) -> SerdeOri { SerdeOri(other.to_quat()) }
 }
 
 impl Component for Ori {

View File

@@ -5,6 +5,7 @@ use specs_idvs::IdvStorage;
 use std::{error::Error, fmt};
 
 #[derive(Debug)]
+#[allow(dead_code)] // TODO: remove once trade sim hits master
 pub enum StatChangeError {
     Underflow,
     Overflow,

View File

@@ -7,7 +7,6 @@
     arbitrary_enum_discriminant,
     associated_type_defaults,
     bool_to_option,
-    const_checked_int_methods,
     const_generics,
     fundamental,
     iter_map_while,

View File

@@ -140,7 +140,7 @@ impl assets::Compound for RecipeBook {
                     .map(load_recipe_input)
                     .collect::<Result<_, _>>()?;
                 let output = load_item_def(output)?;
-                Ok((name.clone(), Recipe { inputs, output }))
+                Ok((name.clone(), Recipe { output, inputs }))
             })
             .collect::<Result<_, assets::Error>>()?;

View File

@@ -235,12 +235,10 @@ impl RegionMap {
             return Some(key);
         } else {
             // Check neighbors
-            for o in region.neighbors.iter() {
-                if let Some(idx) = o {
-                    let (key, region) = self.regions.get_index(*idx).unwrap();
-                    if region.entities().contains(id) {
-                        return Some(*key);
-                    }
+            for idx in region.neighbors.iter().flatten() {
+                let (key, region) = self.regions.get_index(*idx).unwrap();
+                if region.entities().contains(id) {
+                    return Some(*key);
                 }
             }
         }

View File

@@ -45,16 +45,16 @@ pub trait SynthTyped<Context, Target> {
 /// variable, but this way we don't have to implement variable lookup and it
 /// doesn't serialize with variables).
 #[fundamental]
-#[serde(transparent)]
 #[derive(Deserialize, Serialize)]
+#[serde(transparent)]
 pub struct WeakHead<Reduction, Type> {
     pub red: Reduction,
     #[serde(skip)]
     pub ty: PhantomData<Type>,
 }
 
-#[serde(transparent)]
 #[derive(Deserialize, Serialize)]
+#[serde(transparent)]
 pub struct Pure<T>(pub T);
 
 impl<'a, Context: SubContext<S>, T, S> Typed<Context, &'a T, S> for &'a Pure<T> {
@@ -190,15 +190,15 @@ impl<Context, Target> SynthTyped<Context, Target> for WeakHead<Pure<Target>, Target>
 /// lift at some point; struct variants are not yet supported, and neither
 /// attributes on fields.
 #[fundamental]
-#[serde(transparent)]
 #[derive(Deserialize, Serialize)]
+#[serde(transparent)]
 pub struct ElimCase<Cases> {
     pub cases: Cases,
 }
 
 #[fundamental]
-#[serde(transparent)]
 #[derive(Deserialize, Serialize)]
+#[serde(transparent)]
 pub struct ElimProj<Proj> {
     pub proj: Proj,
 }

View File

@@ -12,8 +12,8 @@ use std::{fmt, u64};
 #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
 pub struct Uid(pub u64);
 
-impl Into<u64> for Uid {
-    fn into(self) -> u64 { self.0 }
+impl From<Uid> for u64 {
+    fn from(uid: Uid) -> u64 { uid.0 }
 }
 
 impl From<u64> for Uid {

View File

@@ -37,8 +37,9 @@ impl From<SerdeDir> for Dir {
         }
     }
 }
-impl Into<SerdeDir> for Dir {
-    fn into(self) -> SerdeDir { SerdeDir(*self) }
+
+impl From<Dir> for SerdeDir {
+    fn from(other: Dir) -> SerdeDir { SerdeDir(*other) }
 }
 
 /*pub enum TryFromVec3Error {
     ContainsNans,

View File

@@ -124,10 +124,7 @@ impl<V: RectRasterableVol> VolGrid2d<V> {
     }
 
     pub fn get_key(&self, key: Vec2<i32>) -> Option<&V> {
-        match self.chunks.get(&key) {
-            Some(arc_chunk) => Some(arc_chunk.as_ref()),
-            None => None,
-        }
+        self.chunks.get(&key).map(|arc_chunk| arc_chunk.as_ref())
     }
 
     pub fn get_key_arc(&self, key: Vec2<i32>) -> Option<&Arc<V>> { self.chunks.get(&key) }
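Editor's note: the `match`-to-`map` rewrite above (repeated for the 3D grid below) is the pattern clippy calls a manual `Option::map`: a match returning `Some(f(x))` for `Some(x)` and `None` for `None` collapses to `.map(f)`. A small sketch over an assumed chunk map rather than the real volume types:

    use std::{collections::HashMap, sync::Arc};

    fn get_key(chunks: &HashMap<i32, Arc<String>>, key: i32) -> Option<&String> {
        // Equivalent to: match chunks.get(&key) { Some(c) => Some(c.as_ref()), None => None }
        chunks.get(&key).map(|arc_chunk| arc_chunk.as_ref())
    }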

View File

@@ -123,10 +123,7 @@ impl<V: RasterableVol> VolGrid3d<V> {
     }
 
     pub fn get_key(&self, key: Vec3<i32>) -> Option<&V> {
-        match self.chunks.get(&key) {
-            Some(arc_chunk) => Some(arc_chunk.as_ref()),
-            None => None,
-        }
+        self.chunks.get(&key).map(|arc_chunk| arc_chunk.as_ref())
     }
 
     pub fn get_key_arc(&self, key: Vec3<i32>) -> Option<&Arc<V>> { self.chunks.get(&key) }

View File

@@ -1050,7 +1050,7 @@ where
                 f(data);
                 break;
             },
-            Err(TryRecvError::Closed) => panic!(CHANNEL_ERR),
+            Err(TryRecvError::Closed) => panic!("{}", CHANNEL_ERR),
             Err(TryRecvError::Empty) => {
                 trace!("activly sleeping");
                 cnt += 1;
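Editor's note: the `panic!(CHANNEL_ERR)` to `panic!("{}", CHANNEL_ERR)` change above (and the matching `panic!(err)` fix in the i18n tests below) tracks the newer compiler warning about passing a single non-literal expression to `panic!`; routing the message through an explicit format string keeps the same output while satisfying the lint. A minimal sketch with a hypothetical constant:

    const CHANNEL_ERR: &str = "internal channel disconnected";

    fn die() -> ! {
        // panic!(CHANNEL_ERR);      // flagged as a non-format-string panic
        panic!("{}", CHANNEL_ERR)    // explicit format string, same message
    }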

View File

@@ -1 +1 @@
-nightly-2021-01-01
+nightly-2021-03-22

View File

@@ -6,8 +6,7 @@
     bool_to_option,
     drain_filter,
     option_unwrap_none,
-    option_zip,
-    str_split_once
+    option_zip
 )]
 #![cfg_attr(not(feature = "worldgen"), feature(const_panic))]

View File

@@ -125,18 +125,18 @@ impl EcsSystemMetrics {
             &["system"],
         )?;
 
-        registry.register(Box::new(system_length_hist.clone()))?;
-        registry.register(Box::new(system_length_count.clone()))?;
         registry.register(Box::new(system_start_time.clone()))?;
         registry.register(Box::new(system_length_time.clone()))?;
         registry.register(Box::new(system_thread_avg.clone()))?;
+        registry.register(Box::new(system_length_hist.clone()))?;
+        registry.register(Box::new(system_length_count.clone()))?;
 
         Ok(Self {
-            system_length_hist,
-            system_length_count,
             system_start_time,
             system_length_time,
             system_thread_avg,
+            system_length_hist,
+            system_length_count,
         })
     }
 }

View File

@@ -162,8 +162,8 @@ impl CharacterLoader {
             .unwrap();
 
         Ok(Self {
-            update_tx,
             update_rx,
+            update_tx,
         })
     }

View File

@@ -18,8 +18,8 @@ pub struct NewCharacter<'a> {
 }
 
 #[derive(Identifiable, Queryable, Debug)]
-#[primary_key(character_id)]
 #[table_name = "character"]
+#[primary_key(character_id)]
 pub struct Character {
     pub character_id: i64,
     pub player_uuid: String,
@@ -27,9 +27,9 @@ pub struct Character {
     pub waypoint: Option<String>,
 }
 
-#[primary_key(item_id)]
-#[table_name = "item"]
 #[derive(Debug, Insertable, Queryable, AsChangeset)]
+#[table_name = "item"]
+#[primary_key(item_id)]
 pub struct Item {
     pub item_id: i64,
     pub parent_container_item_id: i64,

View File

@@ -171,12 +171,12 @@ impl AmbientChannel {
     pub fn new(stream: &OutputStreamHandle, tag: AmbientChannelTag) -> Self {
         let new_sink = Sink::try_new(stream);
         match new_sink {
-            Ok(sink) => Self { sink, tag },
+            Ok(sink) => Self { tag, sink },
             Err(_) => {
                 warn!("Failed to create rodio sink. May not play wind sounds.");
                 Self {
-                    sink: Sink::new_idle().0,
                     tag,
+                    sink: Sink::new_idle().0,
                 }
             },
         }

View File

@@ -735,7 +735,7 @@ pub struct Hud {
     force_chat_cursor: Option<Index>,
     tab_complete: Option<String>,
     pulse: f32,
-    velocity: f32,
+    _velocity: f32,
     slot_manager: slots::SlotManager,
     hotbar: hotbar::State,
     events: Vec<Event>,
@@ -844,7 +844,7 @@ impl Hud {
             force_chat_cursor: None,
             tab_complete: None,
             pulse: 0.0,
-            velocity: 0.0,
+            _velocity: 0.0,
             slot_manager,
             hotbar: hotbar_state,
             events: Vec::new(),
@@ -1791,7 +1791,7 @@ impl Hud {
         // Display debug window.
         if let Some(debug_info) = debug_info {
-            self.velocity = match debug_info.velocity {
+            self._velocity = match debug_info.velocity {
                 Some(velocity) => velocity.0.magnitude(),
                 None => 0.0,
             };

View File

@@ -259,15 +259,13 @@ impl assets::Compound for LocalizationList {
         let mut languages = vec![];
 
         let i18n_root = assets::path_of(specifier, "");
-        for i18n_directory in std::fs::read_dir(&i18n_root)? {
-            if let Ok(i18n_entry) = i18n_directory {
-                if let Some(i18n_key) = i18n_entry.file_name().to_str() {
-                    // load the root file of all the subdirectories
-                    if let Ok(localization) = cache.load::<RawLocalization>(
-                        &[specifier, ".", i18n_key, ".", LANG_MANIFEST_FILE].concat(),
-                    ) {
-                        languages.push(localization.read().metadata.clone());
-                    }
+        for i18n_entry in (std::fs::read_dir(&i18n_root)?).flatten() {
+            if let Some(i18n_key) = i18n_entry.file_name().to_str() {
+                // load the root file of all the subdirectories
+                if let Ok(localization) = cache.load::<RawLocalization>(
+                    &[specifier, ".", i18n_key, ".", LANG_MANIFEST_FILE].concat(),
+                ) {
+                    languages.push(localization.read().metadata.clone());
                 }
             }
         }
@@ -418,7 +416,7 @@ mod tests {
                         {
                             Ok(true) => Some(e.final_commit_id()),
                             Ok(false) => Some(existing_commit),
-                            Err(err) => panic!(err),
+                            Err(err) => panic!("{}", err),
                         }
                     },
                     None => Some(e.final_commit_id()),
@@ -439,28 +437,26 @@ mod tests {
         let root_dir = std::env::current_dir()
             .map(|p| p.parent().expect("").to_owned())
            .unwrap();
-        for i18n_file in root_dir.join(&dir).read_dir().unwrap() {
-            if let Ok(i18n_file) = i18n_file {
-                if let Ok(file_type) = i18n_file.file_type() {
-                    if file_type.is_file() {
-                        let full_path = i18n_file.path();
-                        let path = full_path.strip_prefix(&root_dir).unwrap();
-                        println!("-> {:?}", i18n_file.file_name());
-                        let i18n_blob = read_file_from_path(&repo, &head_ref, &path);
-                        let i18n: LocalizationFragment = match from_bytes(i18n_blob.content()) {
-                            Ok(v) => v,
-                            Err(e) => {
-                                eprintln!(
-                                    "Could not parse {} RON file, skipping: {}",
-                                    i18n_file.path().to_string_lossy(),
-                                    e
-                                );
-                                continue;
-                            },
-                        };
-                        i18n_key_versions
-                            .extend(generate_key_version(&repo, &i18n, &path, &i18n_blob));
-                    }
+        //TODO: review unwraps in this file
+        for i18n_file in root_dir.join(&dir).read_dir().unwrap().flatten() {
+            if let Ok(file_type) = i18n_file.file_type() {
+                if file_type.is_file() {
+                    let full_path = i18n_file.path();
+                    let path = full_path.strip_prefix(&root_dir).unwrap();
+                    println!("-> {:?}", i18n_file.file_name());
+                    let i18n_blob = read_file_from_path(&repo, &head_ref, &path);
+                    let i18n: LocalizationFragment = match from_bytes(i18n_blob.content()) {
+                        Ok(v) => v,
+                        Err(e) => {
+                            eprintln!(
+                                "Could not parse {} RON file, skipping: {}",
+                                i18n_file.path().to_string_lossy(),
+                                e
+                            );
+                            continue;
+                        },
+                    };
+                    i18n_key_versions.extend(generate_key_version(&repo, &i18n, &path, &i18n_blob));
                 }
             }
         }
@@ -471,29 +467,27 @@ mod tests {
             .map(|p| p.parent().expect("").to_owned())
             .unwrap();
         // Walk through each file in the directory
-        for i18n_file in root_dir.join(&directory_path).read_dir().unwrap() {
-            if let Ok(i18n_file) = i18n_file {
-                if let Ok(file_type) = i18n_file.file_type() {
-                    // Skip folders and the manifest file (which does not contain the same struct we
-                    // want to load)
-                    if file_type.is_file()
-                        && i18n_file.file_name().to_string_lossy()
-                            != (LANG_MANIFEST_FILE.to_string() + ".ron")
-                    {
-                        let full_path = i18n_file.path();
-                        println!("-> {:?}", full_path.strip_prefix(&root_dir).unwrap());
-                        let f = fs::File::open(&full_path).expect("Failed opening file");
-                        let _: LocalizationFragment = match from_reader(f) {
-                            Ok(v) => v,
-                            Err(e) => {
-                                panic!(
-                                    "Could not parse {} RON file, error: {}",
-                                    full_path.to_string_lossy(),
-                                    e
-                                );
-                            },
-                        };
-                    }
+        for i18n_file in root_dir.join(&directory_path).read_dir().unwrap().flatten() {
+            if let Ok(file_type) = i18n_file.file_type() {
+                // Skip folders and the manifest file (which does not contain the same struct we
+                // want to load)
+                if file_type.is_file()
+                    && i18n_file.file_name().to_string_lossy()
+                        != (LANG_MANIFEST_FILE.to_string() + ".ron")
+                {
+                    let full_path = i18n_file.path();
+                    println!("-> {:?}", full_path.strip_prefix(&root_dir).unwrap());
+                    let f = fs::File::open(&full_path).expect("Failed opening file");
+                    let _: LocalizationFragment = match from_reader(f) {
+                        Ok(v) => v,
+                        Err(e) => {
+                            panic!(
+                                "Could not parse {} RON file, error: {}",
+                                full_path.to_string_lossy(),
+                                e
+                            );
+                        },
+                    };
+                }
             }
         }
     }

View File

@@ -77,7 +77,7 @@ gfx_defines! {
 }
 
 impl Vertex {
-    #[allow(clippy::collapsible_if)]
+    #[allow(clippy::collapsible_else_if)]
     pub fn new(pos: Vec3<f32>, norm: Vec3<f32>) -> Self {
         let norm_bits = if norm.x != 0.0 {
             if norm.x < 0.0 { 0 } else { 1 }

View File

@@ -87,7 +87,7 @@ impl fmt::Display for Vertex {
 impl Vertex {
     // NOTE: Limit to 16 (x) × 16 (y) × 32 (z).
-    #[allow(clippy::collapsible_if)]
+    #[allow(clippy::collapsible_else_if)]
     pub fn new(
         atlas_pos: Vec2<u16>,
         pos: Vec3<f32>,

View File

@@ -272,6 +272,7 @@ pub struct Renderer {
     lod_terrain_pipeline: GfxPipeline<lod_terrain::pipe::Init<'static>>,
     clouds_pipeline: GfxPipeline<clouds::pipe::Init<'static>>,
     postprocess_pipeline: GfxPipeline<postprocess::pipe::Init<'static>>,
+    #[allow(dead_code)] //TODO: remove ?
     player_shadow_pipeline: GfxPipeline<figure::pipe::Init<'static>>,
 
     shaders: AssetHandle<Shaders>,
@@ -358,16 +359,16 @@ impl Renderer {
                 directed_sampler,
             ) = shadow_views;
             Some(ShadowMapRenderer {
-                directed_depth_stencil_view,
-                directed_res,
-                directed_sampler,
                 // point_encoder: factory.create_command_buffer().into(),
                 // directed_encoder: factory.create_command_buffer().into(),
                 point_depth_stencil_view,
                 point_res,
                 point_sampler,
+                directed_depth_stencil_view,
+                directed_res,
+                directed_sampler,
 
                 point_pipeline,
                 terrain_directed_pipeline,
                 figure_directed_pipeline,

View File

@@ -129,14 +129,14 @@ impl BlocksOfInterest {
             reeds,
             fireflies,
             flowers,
-            interactables,
-            lights,
             fire_bowls,
             snow,
             cricket1,
             cricket2,
             cricket3,
             frogs,
+            interactables,
+            lights,
         }
     }
 }

View File

@@ -184,6 +184,7 @@ impl IcedUi {
         let messages = {
             span!(_guard, "update user_interface");
             let mut messages = Vec::new();
+            #[allow(clippy::manual_map)]
             let _event_status_list = user_interface.update(
                 &self.events,
                 cursor_position,

View File

@@ -22,9 +22,9 @@ pub struct MouseDetector<'a> {
 impl<'a> MouseDetector<'a> {
     pub fn new(state: &'a mut State, width: Length, height: Length) -> Self {
         Self {
-            state,
             width,
             height,
+            state,
         }
     }
 }

View File

@@ -45,7 +45,9 @@ pub struct ImageSlider<T, K> {
 struct Track {
     image_id: image::Id,
     color: Option<Color>,
+    #[allow(dead_code)]
     src_rect: Option<Rect>,
+    #[allow(dead_code)]
     breadth: Option<f32>,
     // Padding on the ends of the track constraining the slider to a smaller area.
     padding: (f32, f32),
@@ -56,6 +58,7 @@ struct Slider {
     hover_image_id: Option<image::Id>,
     press_image_id: Option<image::Id>,
     color: Option<Color>,
+    #[allow(dead_code)]
     src_rect: Option<Rect>,
     length: Option<f32>,
 }

View File

@@ -1391,13 +1391,9 @@ impl Window {
                 *remapping = None;
                 None
             },
-            None => {
-                if let Some(game_inputs) = controls.get_associated_game_inputs(&key_mouse) {
-                    Some(game_inputs.iter())
-                } else {
-                    None
-                }
-            },
+            None => controls
+                .get_associated_game_inputs(&key_mouse)
+                .map(|game_inputs| game_inputs.iter()),
         }
     }

View File

@@ -13,7 +13,6 @@
     const_panic,
     label_break_value,
     or_patterns,
-    array_value_iter,
     array_map
 )]

View File

@@ -358,7 +358,8 @@ pub struct WorldSim {
     /// post-erosion warping, cliffs, and other things like that).
     pub max_height: f32,
     pub(crate) chunks: Vec<SimChunk>,
-    pub(crate) locations: Vec<Location>,
+    //TODO: remove or use this property
+    pub(crate) _locations: Vec<Location>,
     pub(crate) gen_ctx: GenCtx,
     pub rng: ChaChaRng,
@@ -1397,7 +1398,7 @@ impl WorldSim {
             map_size_lg,
             max_height: maxh as f32,
             chunks,
-            locations: Vec::new(),
+            _locations: Vec::new(),
             gen_ctx,
             rng,
         };
@@ -1718,7 +1719,7 @@ impl WorldSim {
         }
 
         self.rng = rng;
-        self.locations = locations;
+        self._locations = locations;
     }
 
     pub fn get(&self, chunk_pos: Vec2<i32>) -> Option<&SimChunk> {

View File

@@ -1170,7 +1170,7 @@ impl Floor {
     }
 
     // Find orientation of a position relative to another position
-    #[allow(clippy::collapsible_if)]
+    #[allow(clippy::collapsible_else_if)]
     fn relative_ori(pos1: Vec2<i32>, pos2: Vec2<i32>) -> u8 {
         if (pos1.x - pos2.x).abs() < (pos1.y - pos2.y).abs() {
             if pos1.y > pos2.y { 4 } else { 8 }

View File

@@ -740,10 +740,10 @@ fn wpos_is_hazard(land: &Land, wpos: Vec2<i32>) -> Option<HazardKind> {
         .map_or(true, |c| c.river.near_water())
     {
         Some(HazardKind::Water)
-    } else if let Some(gradient) = Some(land.get_gradient_approx(wpos)).filter(|g| *g > 0.8) {
-        Some(HazardKind::Hill { gradient })
     } else {
-        None
+        Some(land.get_gradient_approx(wpos))
+            .filter(|g| *g > 0.8)
+            .map(|gradient| HazardKind::Hill { gradient })
     }
 }