switch to tracing style and enhance logs with useful information

- Updated CHANGELOG
- reduced dependencies
- found out that we have a lot of duplicate code... a lot...
Marcel Märtens 2020-06-21 23:47:49 +02:00
parent 589254e4ab
commit 9485b45e70
53 changed files with 275 additions and 297 deletions
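The pattern applied across all 53 files is the same: instead of interpolating values into the message with `log`-style format strings, errors and other context are now recorded as structured `tracing` fields. A minimal before/after sketch of that style (function and variable names here are illustrative, not taken from the diff):

```rust
use std::{io, path::PathBuf};
use tracing::warn;

fn watch_example(path: PathBuf, e: io::Error) {
    // Old `log` style: values baked into the message string.
    // warn!("Could not start watching {:#?} due to: {}", &path, e);

    // New `tracing` style: `?value` records the value as a structured
    // field (via its Debug impl) and the message itself stays static.
    warn!(?e, ?path, "Could not start watching file");
}
```

Keeping the message constant and attaching the variable parts as fields is what makes the logs filterable and aggregatable downstream, which is the point of the switch.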


@ -37,6 +37,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Energy regen resets on last ability use instead of on wield - Energy regen resets on last ability use instead of on wield
- Fixed unable to use ability; Secondary and ability3 (fire rod) will now automatically wield - Fixed unable to use ability; Secondary and ability3 (fire rod) will now automatically wield
- Gliding is now a toggle that can be triggered from the ground - Gliding is now a toggle that can be triggered from the ground
- Replaced `log` with `tracing` in all crates
### Removed ### Removed

Cargo.lock generated

@ -4187,7 +4187,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a41f40ed0e162c911ac6fcb53ecdc8134c46905fdbbae8c50add462a538b495f" checksum = "a41f40ed0e162c911ac6fcb53ecdc8134c46905fdbbae8c50add462a538b495f"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"tracing-attributes",
"tracing-core", "tracing-core",
] ]
@ -4202,17 +4201,6 @@ dependencies = [
"tracing-subscriber", "tracing-subscriber",
] ]
[[package]]
name = "tracing-attributes"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99bbad0de3fd923c9c3232ead88510b783e5a4d16a6154adffa3d53308de984c"
dependencies = [
"proc-macro2 1.0.18",
"quote 1.0.7",
"syn 1.0.31",
]
[[package]] [[package]]
name = "tracing-core" name = "tracing-core"
version = "0.1.10" version = "0.1.10"


@ -8,5 +8,5 @@ edition = "2018"
client = { package = "veloren-client", path = "../client" } client = { package = "veloren-client", path = "../client" }
common = { package = "veloren-common", path = "../common" } common = { package = "veloren-common", path = "../common" }
tracing = { version = "0.1", default-features = false , features = ["attributes"] } tracing = { version = "0.1", default-features = false }
tracing-subscriber = { version = "0.2.3", default-features = false, features = ["fmt", "chrono", "ansi", "smallvec"] } tracing-subscriber = { version = "0.2.3", default-features = false, features = ["fmt", "chrono", "ansi", "smallvec"] }


@ -11,7 +11,7 @@ byteorder = "1.3.2"
uvth = "3.1.1" uvth = "3.1.1"
image = { version = "0.22.3", default-features = false, features = ["png"] } image = { version = "0.22.3", default-features = false, features = ["png"] }
num_cpus = "1.10.1" num_cpus = "1.10.1"
tracing = { version = "0.1", default-features = false , features = ["attributes"] } tracing = { version = "0.1", default-features = false }
specs = "0.15.1" specs = "0.15.1"
vek = { version = "0.11.0", features = ["serde"] } vek = { version = "0.11.0", features = ["serde"] }
hashbrown = { version = "0.6", features = ["rayon", "serde", "nightly"] } hashbrown = { version = "0.6", features = ["rayon", "serde", "nightly"] }


@ -522,7 +522,7 @@ impl Client {
// 1) Handle input from frontend. // 1) Handle input from frontend.
// Pass character actions from frontend input to the player's entity. // Pass character actions from frontend input to the player's entity.
if let ClientState::Character = self.client_state { if let ClientState::Character = self.client_state {
if let Err(err) = self if let Err(e) = self
.state .state
.ecs() .ecs()
.write_storage::<Controller>() .write_storage::<Controller>()
@ -537,9 +537,11 @@ impl Client {
.inputs = inputs.clone(); .inputs = inputs.clone();
}) })
{ {
let entry = self.entity;
error!( error!(
"Couldn't access controller component on client entity: {:?}", ?e,
err ?entry,
"Couldn't access controller component on client entity"
); );
} }
self.postbox self.postbox


@ -20,7 +20,7 @@ serde_derive = "1.0"
serde_json = "1.0.41" serde_json = "1.0.41"
ron = { version = "0.6", default-features = false } ron = { version = "0.6", default-features = false }
bincode = "1.2.0" bincode = "1.2.0"
tracing = { version = "0.1", default-features = false , features = ["attributes"] } tracing = { version = "0.1", default-features = false }
rand = "0.7" rand = "0.7"
rayon = "^1.3.0" rayon = "^1.3.0"
lazy_static = "1.4.0" lazy_static = "1.4.0"


@ -14,7 +14,7 @@ use std::{
path::PathBuf, path::PathBuf,
sync::{Arc, RwLock}, sync::{Arc, RwLock},
}; };
use tracing::{debug, error, trace}; use tracing::{trace, error};
/// The error returned by asset loading functions /// The error returned by asset loading functions
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -120,8 +120,8 @@ pub fn load_glob<A: Asset + 'static>(specifier: &str) -> Result<Arc<Vec<Arc<A>>>
load(&specifier.replace("*", &name)) load(&specifier.replace("*", &name))
.map_err(|e| { .map_err(|e| {
error!( error!(
"Failed to load \"{}\" as part of glob \"{}\" with error: {:?}", ?e,
name, specifier, e "Failed to load \"{}\" as part of glob \"{}\"", name, specifier
) )
}) })
.ok() .ok()
@ -204,8 +204,8 @@ pub fn load_watched<A: Asset + 'static>(
indicator.add( indicator.add(
path_with_extension.ok_or_else(|| Error::NotFound(path.to_string_lossy().into_owned()))?, path_with_extension.ok_or_else(|| Error::NotFound(path.to_string_lossy().into_owned()))?,
move || { move || {
if let Err(err) = reload::<A>(&owned_specifier) { if let Err(e) = reload::<A>(&owned_specifier) {
error!("Error reloading {}: {:#?}", &owned_specifier, err); error!(?e, ?owned_specifier, "Error reloading owned_specifier");
} }
}, },
); );
@ -351,7 +351,7 @@ pub fn load_file(specifier: &str, endings: &[&str]) -> Result<BufReader<File>, E
let mut path = path.clone(); let mut path = path.clone();
path.set_extension(ending); path.set_extension(ending);
trace!("Trying to access \"{:?}\"", path); trace!(?path, "Trying to access");
if let Ok(file) = File::open(path) { if let Ok(file) = File::open(path) {
return Ok(BufReader::new(file)); return Ok(BufReader::new(file));
} }
@ -367,7 +367,7 @@ pub fn load_file_glob(specifier: &str, endings: &[&str]) -> Result<BufReader<Fil
let mut path = path.clone(); let mut path = path.clone();
path.set_extension(ending); path.set_extension(ending);
debug!("Trying to access \"{:?}\"", path); trace!(?path, "Trying to access");
if let Ok(file) = File::open(path) { if let Ok(file) = File::open(path) {
return Ok(BufReader::new(file)); return Ok(BufReader::new(file));
} }


@ -50,8 +50,8 @@ impl Watcher {
} }
}, },
None => { None => {
if let Err(err) = self.watcher.watch(path.clone(), RecursiveMode::Recursive) { if let Err(e) = self.watcher.watch(path.clone(), RecursiveMode::Recursive) {
warn!("Could not start watching {:#?} due to: {}", &path, err); warn!(?e, ?path, "Could not start watching file");
return; return;
} }
self.watching.insert(path, (handler, vec![signal])); self.watching.insert(path, (handler, vec![signal]));
@ -121,7 +121,7 @@ impl Watcher {
recv(self.event_rx) -> res => match res { recv(self.event_rx) -> res => match res {
Ok(Ok(event)) => self.handle_event(event), Ok(Ok(event)) => self.handle_event(event),
// Notify Error // Notify Error
Ok(Err(err)) => error!("Notify error: {}", err), Ok(Err(e)) => error!(?e, "Notify error"),
// Disconnected // Disconnected
Err(_) => (), Err(_) => (),
}, },


@ -21,8 +21,8 @@ pub trait CompPacket: Clone + Debug + Send + 'static {
/// Useful for implementing CompPacket trait /// Useful for implementing CompPacket trait
pub fn handle_insert<C: Component>(comp: C, entity: Entity, world: &World) { pub fn handle_insert<C: Component>(comp: C, entity: Entity, world: &World) {
if let Err(err) = world.write_storage::<C>().insert(entity, comp) { if let Err(e) = world.write_storage::<C>().insert(entity, comp) {
error!("Error inserting : {:?}", err); error!(?e, "Error inserting");
} }
} }
/// Useful for implementing CompPacket trait /// Useful for implementing CompPacket trait
@ -31,8 +31,8 @@ pub fn handle_modify<C: Component + Debug>(comp: C, entity: Entity, world: &Worl
*c = comp *c = comp
} else { } else {
error!( error!(
"Error modifying synced component: {:?}, it doesn't seem to exist", ?comp,
comp "Error modifying synced component, it doesn't seem to exist"
); );
} }
} }


@ -90,8 +90,8 @@ impl WorldSyncExt for specs::World {
// Clear from uid allocator // Clear from uid allocator
let maybe_entity = self.write_resource::<UidAllocator>().remove_entity(uid); let maybe_entity = self.write_resource::<UidAllocator>().remove_entity(uid);
if let Some(entity) = maybe_entity { if let Some(entity) = maybe_entity {
if let Err(err) = self.delete_entity(entity) { if let Err(e) = self.delete_entity(entity) {
error!("Failed to delete entity: {:?}", err); error!(?e, "Failed to delete entity");
} }
} }
} }


@ -19,10 +19,16 @@ impl From<SerdeDir> for Dir {
fn from(dir: SerdeDir) -> Self { fn from(dir: SerdeDir) -> Self {
let dir = dir.0; let dir = dir.0;
if dir.map(f32::is_nan).reduce_or() { if dir.map(f32::is_nan).reduce_or() {
warn!("Deserialized dir containing NaNs, replacing with default"); warn!(
?dir,
"Deserialized dir containing NaNs, replacing with default"
);
Default::default() Default::default()
} else if !dir.is_normalized() { } else if !dir.is_normalized() {
warn!("Deserialized unnormalized dir, replacing with default"); warn!(
?dir,
"Deserialized unnormalized dir, replacing with default"
);
Default::default() Default::default()
} else { } else {
Self(dir) Self(dir)
@ -109,7 +115,7 @@ fn slerp_normalized(from: vek::Vec3<f32>, to: vek::Vec3<f32>, factor: f32) -> ve
}; };
if unnormalized { if unnormalized {
panic!("Called slerp_normalized with unnormalized from: {:?}", from); panic!("Called slerp_normalized with unnormalized `from`: {}", from);
} }
} }
@ -122,7 +128,7 @@ fn slerp_normalized(from: vek::Vec3<f32>, to: vek::Vec3<f32>, factor: f32) -> ve
}; };
if unnormalized { if unnormalized {
panic!("Called slerp_normalized with unnormalized to: {:?}", to); panic!("Called slerp_normalized with unnormalized `to`: {}", to);
} }
} }


@ -12,5 +12,5 @@ default = ["worldgen"]
server = { package = "veloren-server", path = "../server", default-features = false } server = { package = "veloren-server", path = "../server", default-features = false }
common = { package = "veloren-common", path = "../common" } common = { package = "veloren-common", path = "../common" }
tracing = { version = "0.1", default-features = false , features = ["attributes"] } tracing = { version = "0.1", default-features = false }
tracing-subscriber = { version = "0.2.3", default-features = false, features = ["fmt", "chrono", "ansi", "smallvec"] } tracing-subscriber = { version = "0.2.3", default-features = false, features = ["fmt", "chrono", "ansi", "smallvec"] }


@ -30,7 +30,7 @@ fn main() {
let mut server = Server::new(settings).expect("Failed to create server instance!"); let mut server = Server::new(settings).expect("Failed to create server instance!");
info!("Server is ready to accept connections."); info!("Server is ready to accept connections.");
info!("Metrics port: {}", metrics_port); info!(?metrics_port, "starting metrics at port");
loop { loop {
let events = server let events = server


@ -14,7 +14,7 @@ world = { package = "veloren-world", path = "../world" }
specs-idvs = { git = "https://gitlab.com/veloren/specs-idvs.git" } specs-idvs = { git = "https://gitlab.com/veloren/specs-idvs.git" }
tracing = { version = "0.1", default-features = false , features = ["attributes"] } tracing = { version = "0.1", default-features = false }
specs = { version = "0.15.1", features = ["shred-derive"] } specs = { version = "0.15.1", features = ["shred-derive"] }
vek = "0.11.0" vek = "0.11.0"
uvth = "3.1.1" uvth = "3.1.1"


@ -35,7 +35,7 @@ impl AuthProvider {
pub fn logout(&mut self, uuid: Uuid) { pub fn logout(&mut self, uuid: Uuid) {
if self.accounts.remove(&uuid).is_none() { if self.accounts.remove(&uuid).is_none() {
error!("Attempted to logout user that is not logged in."); error!(?uuid, "Attempted to logout user that is not logged in.");
}; };
} }
@ -45,7 +45,7 @@ impl AuthProvider {
match &self.auth_server { match &self.auth_server {
// Token from auth server expected // Token from auth server expected
Some(srv) => { Some(srv) => {
info!("Validating '{}' token.", &username_or_token); info!(?username_or_token, "Validating token");
// Parse token // Parse token
let token = AuthToken::from_str(&username_or_token) let token = AuthToken::from_str(&username_or_token)
.map_err(|e| RegisterError::AuthError(e.to_string()))?; .map_err(|e| RegisterError::AuthError(e.to_string()))?;
@ -66,7 +66,7 @@ impl AuthProvider {
let username = username_or_token; let username = username_or_token;
let uuid = derive_uuid(&username); let uuid = derive_uuid(&username);
if !self.accounts.contains_key(&uuid) { if !self.accounts.contains_key(&uuid) {
info!("New User '{}'", username); info!(?username, "New User");
self.accounts.insert(uuid, username.clone()); self.accounts.insert(uuid, username.clone());
Ok((username, uuid)) Ok((username, uuid))
} else { } else {


@ -1265,8 +1265,8 @@ fn handle_remove_lights(
let size = to_delete.len(); let size = to_delete.len();
for entity in to_delete { for entity in to_delete {
if let Err(err) = server.state.delete_entity_recorded(entity) { if let Err(e) = server.state.delete_entity_recorded(entity) {
error!("Failed to delete light: {:?}", err); error!(?e, "Failed to delete light: {:?}", e);
} }
} }


@ -80,13 +80,13 @@ pub fn handle_destroy(server: &mut Server, entity: EcsEntity, cause: HealthSourc
.write_storage() .write_storage()
.insert(entity, comp::Vel(Vec3::zero())) .insert(entity, comp::Vel(Vec3::zero()))
.err() .err()
.map(|err| error!("Failed to set zero vel on dead client: {:?}", err)); .map(|e| error!(?e, ?entity, "Failed to set zero vel on dead client"));
state state
.ecs() .ecs()
.write_storage() .write_storage()
.insert(entity, comp::ForceUpdate) .insert(entity, comp::ForceUpdate)
.err() .err()
.map(|err| error!("Failed to insert ForceUpdate on dead client: {:?}", err)); .map(|e| error!(?e, ?entity, "Failed to insert ForceUpdate on dead client"));
state state
.ecs() .ecs()
.write_storage::<comp::LightEmitter>() .write_storage::<comp::LightEmitter>()
@ -306,14 +306,14 @@ pub fn handle_destroy(server: &mut Server, entity: EcsEntity, cause: HealthSourc
} else { } else {
let _ = state let _ = state
.delete_entity_recorded(entity) .delete_entity_recorded(entity)
.map_err(|err| error!("Failed to delete destroyed entity: {:?}", err)); .map_err(|e| error!(?e, ?entity, "Failed to delete destroyed entity"));
} }
// TODO: Add Delete(time_left: Duration) component // TODO: Add Delete(time_left: Duration) component
/* /*
// If not a player delete the entity // If not a player delete the entity
if let Err(err) = state.delete_entity_recorded(entity) { if let Err(err) = state.delete_entity_recorded(entity) {
error!("Failed to delete destroyed entity: {:?}", err); error!(?e, "Failed to delete destroyed entity");
} }
*/ */
} }
@ -361,10 +361,10 @@ pub fn handle_respawn(server: &Server, entity: EcsEntity) {
.write_storage() .write_storage()
.insert(entity, comp::ForceUpdate) .insert(entity, comp::ForceUpdate)
.err() .err()
.map(|err| { .map(|e| {
error!( error!(
"Error inserting ForceUpdate component when respawning client: {:?}", ?e,
err "Error inserting ForceUpdate component when respawning client"
) )
}); });
} }


@ -121,12 +121,10 @@ pub fn handle_possess(server: &Server, possessor_uid: Uid, possesse_uid: Uid) {
if clients.get_mut(possesse).is_none() { if clients.get_mut(possesse).is_none() {
if let Some(mut client) = clients.remove(possessor) { if let Some(mut client) = clients.remove(possessor) {
client.notify(ServerMsg::SetPlayerEntity(possesse_uid.into())); client.notify(ServerMsg::SetPlayerEntity(possesse_uid.into()));
clients.insert(possesse, client).err().map(|e| { clients
error!( .insert(possesse, client)
"Error inserting client component during possession: {:?}", .err()
e .map(|e| error!(?e, "Error inserting client component during possession"));
)
});
// Put possess item into loadout // Put possess item into loadout
let mut loadouts = ecs.write_storage::<comp::Loadout>(); let mut loadouts = ecs.write_storage::<comp::Loadout>();
let loadout = loadouts let loadout = loadouts
@ -155,10 +153,7 @@ pub fn handle_possess(server: &Server, possessor_uid: Uid, possesse_uid: Uid) {
let mut players = ecs.write_storage::<comp::Player>(); let mut players = ecs.write_storage::<comp::Player>();
if let Some(player) = players.remove(possessor) { if let Some(player) = players.remove(possessor) {
players.insert(possesse, player).err().map(|e| { players.insert(possesse, player).err().map(|e| {
error!( error!(?e, "Error inserting player component during possession")
"Error inserting player component during possession: {:?}",
e
)
}); });
} }
} }
@ -168,8 +163,8 @@ pub fn handle_possess(server: &Server, possessor_uid: Uid, possesse_uid: Uid) {
if let Some(s) = subscriptions.remove(possessor) { if let Some(s) = subscriptions.remove(possessor) {
subscriptions.insert(possesse, s).err().map(|e| { subscriptions.insert(possesse, s).err().map(|e| {
error!( error!(
"Error inserting subscription component during possession: {:?}", ?e,
e "Error inserting subscription component during possession"
) )
}); });
} }
@ -185,7 +180,7 @@ pub fn handle_possess(server: &Server, possessor_uid: Uid, possesse_uid: Uid) {
let mut admins = ecs.write_storage::<comp::Admin>(); let mut admins = ecs.write_storage::<comp::Admin>();
if let Some(admin) = admins.remove(possessor) { if let Some(admin) = admins.remove(possessor) {
admins.insert(possesse, admin).err().map(|e| { admins.insert(possesse, admin).err().map(|e| {
error!("Error inserting admin component during possession: {:?}", e) error!(?e, "Error inserting admin component during possession")
}); });
} }
} }
@ -194,10 +189,7 @@ pub fn handle_possess(server: &Server, possessor_uid: Uid, possesse_uid: Uid) {
let mut waypoints = ecs.write_storage::<comp::Waypoint>(); let mut waypoints = ecs.write_storage::<comp::Waypoint>();
if let Some(waypoint) = waypoints.remove(possessor) { if let Some(waypoint) = waypoints.remove(possessor) {
waypoints.insert(possesse, waypoint).err().map(|e| { waypoints.insert(possesse, waypoint).err().map(|e| {
error!( error!(?e, "Error inserting waypoint component during possession",)
"Error inserting waypoint component during possession {:?}",
e
)
}); });
} }
} }


@ -108,7 +108,10 @@ pub fn handle_inventory(server: &mut Server, entity: EcsEntity, manip: comp::Inv
let inventory = if let Some(inventory) = inventories.get_mut(entity) { let inventory = if let Some(inventory) = inventories.get_mut(entity) {
inventory inventory
} else { } else {
error!("Can't manipulate inventory, entity doesn't have one"); error!(
?entity,
"Can't manipulate inventory, entity doesn't have one"
);
return; return;
}; };
@ -220,7 +223,7 @@ pub fn handle_inventory(server: &mut Server, entity: EcsEntity, manip: comp::Inv
slot::unequip(slot, inventory, loadout); slot::unequip(slot, inventory, loadout);
Some(comp::InventoryUpdateEvent::Used) Some(comp::InventoryUpdateEvent::Used)
} else { } else {
error!("Entity doesn't have a loadout, can't unequip..."); error!(?entity, "Entity doesn't have a loadout, can't unequip...");
None None
} }
}, },


@ -36,8 +36,12 @@ pub fn handle_exit_ingame(server: &mut Server, entity: EcsEntity) {
entity_builder.with(uid).build(); entity_builder.with(uid).build();
} }
// Delete old entity // Delete old entity
if let Err(err) = state.delete_entity_recorded(entity) { if let Err(e) = state.delete_entity_recorded(entity) {
error!("Failed to delete entity when removing character: {:?}", err); error!(
?e,
?entity,
"Failed to delete entity when removing character"
);
} }
} }
@ -87,8 +91,8 @@ pub fn handle_client_disconnect(server: &mut Server, entity: EcsEntity) -> Event
} }
// Delete client entity // Delete client entity
if let Err(err) = state.delete_entity_recorded(entity) { if let Err(e) = state.delete_entity_recorded(entity) {
error!("Failed to delete disconnected client: {:?}", err); error!(?e, ?entity, "Failed to delete disconnected client");
} }
Event::ClientDisconnected { entity } Event::ClientDisconnected { entity }


@ -253,19 +253,16 @@ impl Server {
// Run pending DB migrations (if any) // Run pending DB migrations (if any)
debug!("Running DB migrations..."); debug!("Running DB migrations...");
if let Some(error) = if let Some(e) = persistence::run_migrations(&this.server_settings.persistence_db_dir).err()
persistence::run_migrations(&this.server_settings.persistence_db_dir).err()
{ {
info!("Migration error: {}", format!("{:#?}", error)); info!(?e, "Migration error");
} }
debug!("created veloren server with: {:?}", &settings); debug!(?settings, "created veloren server with");
info!( let git_hash = *common::util::GIT_HASH;
"Server version: {}[{}]", let git_date = *common::util::GIT_DATE;
*common::util::GIT_HASH, info!(?git_hash, ?git_date, "Server version",);
*common::util::GIT_DATE
);
Ok(this) Ok(this)
} }
@ -378,8 +375,8 @@ impl Server {
.collect::<Vec<_>>() .collect::<Vec<_>>()
}; };
for entity in to_delete { for entity in to_delete {
if let Err(err) = self.state.delete_entity_recorded(entity) { if let Err(e) = self.state.delete_entity_recorded(entity) {
error!("Failed to delete agent outside the terrain: {:?}", err); error!(?e, "Failed to delete agent outside the terrain");
} }
} }


@ -142,7 +142,7 @@ impl ServerMetrics {
Ok(Some(rq)) => rq, Ok(Some(rq)) => rq,
Ok(None) => continue, Ok(None) => continue,
Err(e) => { Err(e) => {
println!("error: {}", e); error!(?e, "metrics http server error");
break; break;
}, },
}; };
@ -157,8 +157,8 @@ impl ServerMetrics {
); );
if let Err(e) = request.respond(response) { if let Err(e) = request.respond(response) {
error!( error!(
"The metrics HTTP server had encountered and error with answering, {}", ?e,
e "The metrics HTTP server had encountered and error with answering",
); );
} }
} }


@ -77,13 +77,15 @@ pub fn load_character_data(
let row = NewLoadout::from((character_data.id, &default_loadout)); let row = NewLoadout::from((character_data.id, &default_loadout));
if let Err(error) = diesel::insert_into(schema::loadout::table) if let Err(e) = diesel::insert_into(schema::loadout::table)
.values(&row) .values(&row)
.execute(&connection) .execute(&connection)
{ {
let char_id = character_data.id;
warn!( warn!(
"Failed to create an loadout record for character {}: {}", ?e,
&character_data.id, error ?char_id,
"Failed to create an loadout record for character",
) )
} }
@ -314,8 +316,8 @@ impl CharacterUpdater {
}) })
.collect(); .collect();
if let Err(err) = self.update_tx.as_ref().unwrap().send(updates) { if let Err(e) = self.update_tx.as_ref().unwrap().send(updates) {
error!("Could not send stats updates: {:?}", err); error!(?e, "Could not send stats updates");
} }
} }
@ -333,7 +335,7 @@ impl CharacterUpdater {
fn batch_update(updates: impl Iterator<Item = (i32, CharacterUpdateData)>, db_dir: &str) { fn batch_update(updates: impl Iterator<Item = (i32, CharacterUpdateData)>, db_dir: &str) {
let connection = establish_connection(db_dir); let connection = establish_connection(db_dir);
if let Err(err) = connection.transaction::<_, diesel::result::Error, _>(|| { if let Err(e) = connection.transaction::<_, diesel::result::Error, _>(|| {
updates.for_each( updates.for_each(
|(character_id, (stats_update, inventory_update, loadout_update))| { |(character_id, (stats_update, inventory_update, loadout_update))| {
update( update(
@ -348,7 +350,7 @@ fn batch_update(updates: impl Iterator<Item = (i32, CharacterUpdateData)>, db_di
Ok(()) Ok(())
}) { }) {
error!("Error during stats batch update transaction: {:?}", err); error!(?e, "Error during stats batch update transaction");
} }
} }
@ -359,47 +361,42 @@ fn update(
loadout: &LoadoutUpdate, loadout: &LoadoutUpdate,
connection: &SqliteConnection, connection: &SqliteConnection,
) { ) {
if let Err(error) = if let Err(e) =
diesel::update(schema::stats::table.filter(schema::stats::character_id.eq(character_id))) diesel::update(schema::stats::table.filter(schema::stats::character_id.eq(character_id)))
.set(stats) .set(stats)
.execute(connection) .execute(connection)
{ {
warn!( warn!(?e, ?character_id, "Failed to update stats for character",)
"Failed to update stats for character: {:?}: {:?}",
character_id, error
)
} }
if let Err(error) = diesel::update( if let Err(e) = diesel::update(
schema::inventory::table.filter(schema::inventory::character_id.eq(character_id)), schema::inventory::table.filter(schema::inventory::character_id.eq(character_id)),
) )
.set(inventory) .set(inventory)
.execute(connection) .execute(connection)
{ {
warn!( warn!(
"Failed to update inventory for character: {:?}: {:?}", ?e,
character_id, error ?character_id,
"Failed to update inventory for character",
) )
} }
if let Err(error) = diesel::update( if let Err(e) = diesel::update(
schema::loadout::table.filter(schema::loadout::character_id.eq(character_id)), schema::loadout::table.filter(schema::loadout::character_id.eq(character_id)),
) )
.set(loadout) .set(loadout)
.execute(connection) .execute(connection)
{ {
warn!( warn!(?e, ?character_id, "Failed to update loadout for character",)
"Failed to update loadout for character: {:?}: {:?}",
character_id, error
)
} }
} }
impl Drop for CharacterUpdater { impl Drop for CharacterUpdater {
fn drop(&mut self) { fn drop(&mut self) {
drop(self.update_tx.take()); drop(self.update_tx.take());
if let Err(err) = self.handle.take().unwrap().join() { if let Err(e) = self.handle.take().unwrap().join() {
error!("Error from joining character update thread: {:?}", err); error!(?e, "Error from joining character update thread");
} }
} }
} }


@ -31,16 +31,16 @@ fn establish_connection(db_dir: &str) -> SqliteConnection {
// Use Write-Ahead-Logging for improved concurrency: https://sqlite.org/wal.html // Use Write-Ahead-Logging for improved concurrency: https://sqlite.org/wal.html
// Set a busy timeout (in ms): https://sqlite.org/c3ref/busy_timeout.html // Set a busy timeout (in ms): https://sqlite.org/c3ref/busy_timeout.html
if let Err(error) = connection.batch_execute( if let Err(e) = connection.batch_execute(
" "
PRAGMA journal_mode = WAL; PRAGMA journal_mode = WAL;
PRAGMA busy_timeout = 250; PRAGMA busy_timeout = 250;
", ",
) { ) {
warn!( warn!(
?e,
"Failed adding PRAGMA statements while establishing sqlite connection, this will \ "Failed adding PRAGMA statements while establishing sqlite connection, this will \
result in a higher likelihood of locking errors: {}", result in a higher likelihood of locking errors"
error
); );
} }
@ -49,8 +49,8 @@ fn establish_connection(db_dir: &str) -> SqliteConnection {
#[allow(clippy::single_match)] // TODO: Pending review in #587 #[allow(clippy::single_match)] // TODO: Pending review in #587
fn apply_saves_dir_override(db_dir: &str) -> String { fn apply_saves_dir_override(db_dir: &str) -> String {
if let Some(val) = env::var_os("VELOREN_SAVES_DIR") { if let Some(saves_dir) = env::var_os("VELOREN_SAVES_DIR") {
let path = PathBuf::from(val); let path = PathBuf::from(saves_dir.clone());
if path.exists() || path.parent().map(|x| x.exists()).unwrap_or(false) { if path.exists() || path.parent().map(|x| x.exists()).unwrap_or(false) {
// Only allow paths with valid unicode characters // Only allow paths with valid unicode characters
match path.to_str() { match path.to_str() {
@ -58,7 +58,7 @@ fn apply_saves_dir_override(db_dir: &str) -> String {
None => {}, None => {},
} }
} }
warn!("VELOREN_SAVES_DIR points to an invalid path."); warn!(?saves_dir, "VELOREN_SAVES_DIR points to an invalid path.");
} }
db_dir.to_string() db_dir.to_string()
} }


@ -173,8 +173,8 @@ where
match serde_json::from_str(&t) { match serde_json::from_str(&t) {
Ok(data) => Ok(Self(data)), Ok(data) => Ok(Self(data)),
Err(error) => { Err(e) => {
warn!("Failed to deserialise inventory data: {}", error); warn!(?e, "Failed to deserialise inventory data");
Ok(Self(comp::Inventory::default())) Ok(Self(comp::Inventory::default()))
}, },
} }
@ -259,8 +259,8 @@ where
match serde_json::from_str(&t) { match serde_json::from_str(&t) {
Ok(data) => Ok(Self(data)), Ok(data) => Ok(Self(data)),
Err(error) => { Err(e) => {
warn!("Failed to deserialise loadout data: {}", error); warn!(?e, "Failed to deserialise loadout data");
// We don't have a weapon reference here, so we default to sword // We don't have a weapon reference here, so we default to sword
let loadout = LoadoutBuilder::new() let loadout = LoadoutBuilder::new()


@ -72,7 +72,7 @@ impl ServerSettings {
match ron::de::from_reader(file) { match ron::de::from_reader(file) {
Ok(x) => x, Ok(x) => x,
Err(e) => { Err(e) => {
warn!("Failed to parse setting file! Fallback to default. {}", e); warn!(?e, "Failed to parse setting file! Fallback to default");
Self::default() Self::default()
}, },
} }
@ -80,7 +80,7 @@ impl ServerSettings {
let default_settings = Self::default(); let default_settings = Self::default();
match default_settings.save_to_file() { match default_settings.save_to_file() {
Err(e) => error!("Failed to create default setting file! {}", e), Err(e) => error!(?e, "Failed to create default setting file!"),
_ => {}, _ => {},
} }
default_settings default_settings


@ -172,13 +172,11 @@ impl StateExt for State {
self.write_component(entity, inventory); self.write_component(entity, inventory);
self.write_component(entity, loadout); self.write_component(entity, loadout);
}, },
Err(error) => { Err(e) => {
warn!( warn!(
"{}", ?e,
format!( ?character_id,
"Failed to load character data for character_id {}: {}", "Failed to load character data for character_id"
character_id, error
)
); );
if let Some(client) = self.ecs().write_storage::<Client>().get_mut(entity) { if let Some(client) = self.ecs().write_storage::<Client>().get_mut(entity) {
@ -295,6 +293,8 @@ impl StateExt for State {
// and then deleted before the region manager had a chance to assign it a // and then deleted before the region manager had a chance to assign it a
// region // region
warn!( warn!(
?uid,
?pos,
"Failed to find region containing entity during entity deletion, assuming \ "Failed to find region containing entity during entity deletion, assuming \
it wasn't sent to any clients and so deletion doesn't need to be \ it wasn't sent to any clients and so deletion doesn't need to be \
recorded for sync purposes" recorded for sync purposes"


@ -275,11 +275,11 @@ impl<'a> System<'a> for Sys {
| ClientState::Spectator | ClientState::Spectator
| ClientState::Character => match validate_chat_msg(&message) { | ClientState::Character => match validate_chat_msg(&message) {
Ok(()) => new_chat_msgs.push((Some(entity), ServerMsg::chat(message))), Ok(()) => new_chat_msgs.push((Some(entity), ServerMsg::chat(message))),
Err(ChatMsgValidationError::TooLong) => warn!( Err(ChatMsgValidationError::TooLong) => {
"Recieved a chat message that's too long (max:{} len:{})", let max = MAX_BYTES_CHAT_MSG;
MAX_BYTES_CHAT_MSG, let len = message.len();
message.len() warn!(?len, ?max, "Recieved a chat message that's too long")
), },
}, },
ClientState::Pending => {}, ClientState::Pending => {},
}, },


@ -259,14 +259,15 @@ pub fn initialize_region_subscription(world: &World, entity: specs::Entity) {
} }
} }
if let Err(err) = world.write_storage().insert(entity, RegionSubscription { if let Err(e) = world.write_storage().insert(entity, RegionSubscription {
fuzzy_chunk, fuzzy_chunk,
regions, regions,
}) { }) {
error!("Failed to insert region subscription component: {:?}", err); error!(?e, "Failed to insert region subscription component");
} }
} else { } else {
debug!( debug!(
?entity,
"Failed to initialize region subcription. Couldn't retrieve all the neccesary \ "Failed to initialize region subcription. Couldn't retrieve all the neccesary \
components on the provided entity" components on the provided entity"
); );


@ -72,8 +72,8 @@ authc = { git = "https://gitlab.com/veloren/auth.git", rev = "223a4097f7ebc8d451
const-tweaker = { version = "0.2.5", optional = true } const-tweaker = { version = "0.2.5", optional = true }
# Logging # Logging
tracing = { version = "0.1", default-features = false , features = ["attributes"] } tracing = { version = "0.1", default-features = false }
tracing-subscriber = { version = "0.2.3", default-features = false, features = ["env-filter", "fmt", "chrono", "ansi", "smallvec", "registry", "tracing-log"] } tracing-subscriber = { version = "0.2.3", default-features = false, features = ["env-filter", "fmt", "chrono", "ansi", "smallvec" , "tracing-log"] }
tracing-log = "0.1.1" tracing-log = "0.1.1"
tracing-appender = "0.1" tracing-appender = "0.1"


@ -62,14 +62,10 @@ pub fn init() {
modified_paths.insert(path); modified_paths.insert(path);
} }
let mut info = "Hot reloading animations because these files were modified:".to_owned(); warn!(
for path in std::mem::take(&mut modified_paths) { ?modified_paths,
info.push('\n'); "Hot reloading animations because these files were modified"
info.push('\"'); );
info.push_str(&path);
info.push('\"');
}
warn!("{}", info);
// Reload // Reload
reload(); reload();


@ -86,14 +86,14 @@ pub trait Skeleton: Send + Sync + 'static {
let lib = &lock.as_ref().unwrap().lib; let lib = &lock.as_ref().unwrap().lib;
let compute_fn: libloading::Symbol<fn(&Self) -> ([FigureBoneData; 16], Vec3<f32>)> = let compute_fn: libloading::Symbol<fn(&Self) -> ([FigureBoneData; 16], Vec3<f32>)> =
unsafe { lib.get(Self::COMPUTE_FN) }.unwrap_or_else(|err| { unsafe { lib.get(Self::COMPUTE_FN) }.unwrap_or_else(|e| {
panic!( panic!(
"Trying to use: {} but had error: {:?}", "Trying to use: {} but had error: {:?}",
CStr::from_bytes_with_nul(Self::COMPUTE_FN) CStr::from_bytes_with_nul(Self::COMPUTE_FN)
.map(CStr::to_str) .map(CStr::to_str)
.unwrap() .unwrap()
.unwrap(), .unwrap(),
err e
) )
}); });
@ -154,14 +154,14 @@ pub trait Animation {
//println!("{}", start.elapsed().as_nanos()); //println!("{}", start.elapsed().as_nanos());
f f
} }
.unwrap_or_else(|err| { .unwrap_or_else(|e| {
panic!( panic!(
"Trying to use: {} but had error: {:?}", "Trying to use: {} but had error: {:?}",
CStr::from_bytes_with_nul(Self::UPDATE_FN) CStr::from_bytes_with_nul(Self::UPDATE_FN)
.map(CStr::to_str) .map(CStr::to_str)
.unwrap() .unwrap()
.unwrap(), .unwrap(),
err e
) )
}); });


@ -53,7 +53,7 @@ impl SoundCache {
.entry(name.to_string()) .entry(name.to_string())
.or_insert_with(|| { .or_insert_with(|| {
Sound::load(name).unwrap_or_else(|_| { Sound::load(name).unwrap_or_else(|_| {
warn!("SoundCache: Failed to load sound: {}", name); warn!(?name, "SoundCache: Failed to load sound");
Sound::empty() Sound::empty()
}) })


@ -46,7 +46,7 @@ impl<'a> System<'a> for Sys {
interpolated interpolated
.insert(entity, Interpolated { pos, ori }) .insert(entity, Interpolated { pos, ori })
.err() .err()
.map(|err| warn!("Error inserting Interpolated component: {}", err)); .map(|e| warn!(?e, "Error inserting Interpolated component"));
} }
// Remove Interpolated component from entities which don't have a position or an // Remove Interpolated component from entities which don't have a position or an
// orientation or a velocity // orientation or a velocity


@ -141,8 +141,8 @@ impl ItemImgs {
// There was no specification in the ron // There was no specification in the ron
None => { None => {
warn!( warn!(
"{:?} has no specified image file (note: hot-reloading won't work here)", ?item_kind,
item_kind "missing specified image file (note: hot-reloading won't work here)",
); );
None None
}, },
@ -161,10 +161,7 @@ fn graceful_load_vox(specifier: &str) -> Arc<DotVoxData> {
match assets::load::<DotVoxData>(full_specifier.as_str()) { match assets::load::<DotVoxData>(full_specifier.as_str()) {
Ok(dot_vox) => dot_vox, Ok(dot_vox) => dot_vox,
Err(_) => { Err(_) => {
error!( error!(?full_specifier, "Could not load vox file for item images",);
"Could not load vox file for item images: {}",
full_specifier
);
assets::load_expect::<DotVoxData>("voxygen.voxel.not_found") assets::load_expect::<DotVoxData>("voxygen.voxel.not_found")
}, },
} }
@ -174,10 +171,7 @@ fn graceful_load_img(specifier: &str) -> Arc<DynamicImage> {
match assets::load::<DynamicImage>(full_specifier.as_str()) { match assets::load::<DynamicImage>(full_specifier.as_str()) {
Ok(img) => img, Ok(img) => img,
Err(_) => { Err(_) => {
error!( error!(?full_specifier, "Could not load image file for item images");
"Could not load image file for item images: {}",
full_specifier
);
assets::load_expect::<DynamicImage>("voxygen.element.not_found") assets::load_expect::<DynamicImage>("voxygen.element.not_found")
}, },
} }


@ -2,7 +2,7 @@ use std::fs;
use crate::settings::Settings; use crate::settings::Settings;
use tracing::{error, info, instrument}; use tracing::{error, info};
use tracing_subscriber::{filter::LevelFilter, prelude::*, registry, EnvFilter}; use tracing_subscriber::{filter::LevelFilter, prelude::*, registry, EnvFilter};
const VOXYGEN_LOG_ENV: &str = "VOXYGEN_LOG"; const VOXYGEN_LOG_ENV: &str = "VOXYGEN_LOG";
@ -20,7 +20,6 @@ const VOXYGEN_LOG_ENV: &str = "VOXYGEN_LOG";
/// following in your environment. /// following in your environment.
/// ///
/// `VOXYGEN_LOG="veloren_voxygen=trace"` /// `VOXYGEN_LOG="veloren_voxygen=trace"`
#[instrument]
pub fn init(settings: &Settings) -> Vec<impl Drop> { pub fn init(settings: &Settings) -> Vec<impl Drop> {
// To hold the guards that we create, they will cause the logs to be // To hold the guards that we create, they will cause the logs to be
// flushed when they're dropped. // flushed when they're dropped.
@ -44,13 +43,14 @@ pub fn init(settings: &Settings) -> Vec<impl Drop> {
// Try to create the log file's parent folders. // Try to create the log file's parent folders.
let log_folders_created = fs::create_dir_all(&settings.log.logs_path); let log_folders_created = fs::create_dir_all(&settings.log.logs_path);
const LOG_FILENAME: &str = "voxygen.log";
match log_folders_created { match log_folders_created {
// If the parent folders were created then attach both a terminal and a // If the parent folders were created then attach both a terminal and a
// file writer to the registry and init it. // file writer to the registry and init it.
Ok(_) => { Ok(_) => {
let file_appender = let file_appender =
tracing_appender::rolling::daily(&settings.log.logs_path, "voxygen.log"); tracing_appender::rolling::daily(&settings.log.logs_path, LOG_FILENAME);
let (non_blocking_file, _file_guard) = tracing_appender::non_blocking(file_appender); let (non_blocking_file, _file_guard) = tracing_appender::non_blocking(file_appender);
_guards.push(_file_guard); _guards.push(_file_guard);
registry() registry()
@ -58,13 +58,14 @@ pub fn init(settings: &Settings) -> Vec<impl Drop> {
.with(tracing_subscriber::fmt::layer().with_writer(non_blocking_file)) .with(tracing_subscriber::fmt::layer().with_writer(non_blocking_file))
.with(filter) .with(filter)
.init(); .init();
info!("Setup terminal and file logging."); let logdir = &settings.log.logs_path;
info!(?logdir, "Setup terminal and file logging.");
}, },
// Otherwise just add a terminal writer and init it. // Otherwise just add a terminal writer and init it.
Err(e) => { Err(e) => {
error!( error!(
"Failed to create log file! {}. Falling back to terminal logging only.", ?e,
e "Failed to create log file!. Falling back to terminal logging only.",
); );
registry() registry()
.with(tracing_subscriber::fmt::layer().with_writer(non_blocking)) .with(tracing_subscriber::fmt::layer().with_writer(non_blocking))
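For reference, the subscriber setup this hunk builds up follows the standard `tracing-subscriber` layering pattern. Below is a stripped-down, terminal-only sketch under the same feature set (`env-filter` + `fmt`); the real `init` above additionally attaches a non-blocking file appender, and the function name here is illustrative:

```rust
use tracing_subscriber::{prelude::*, registry, EnvFilter};

// Minimal terminal-only variant: an env-filter layer (driven by e.g.
// VOXYGEN_LOG) composed with a fmt layer on top of the registry.
fn init_terminal_logging(default_directives: &str) {
    let filter = EnvFilter::try_from_env("VOXYGEN_LOG")
        .unwrap_or_else(|_| EnvFilter::new(default_directives));

    registry()
        .with(tracing_subscriber::fmt::layer())
        .with(filter)
        .init();
}
```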


@ -16,9 +16,8 @@ use veloren_voxygen::{
use common::assets::{load, load_expect}; use common::assets::{load, load_expect};
use std::{mem, panic}; use std::{mem, panic};
use tracing::{debug, error, instrument, warn}; use tracing::{debug, error, warn};
#[instrument]
fn main() { fn main() {
#[cfg(feature = "tweak")] #[cfg(feature = "tweak")]
const_tweaker::run().expect("Could not run server"); const_tweaker::run().expect("Could not run server");
@ -65,11 +64,12 @@ fn main() {
let localized_strings = load::<VoxygenLocalization>(&i18n_asset_key( let localized_strings = load::<VoxygenLocalization>(&i18n_asset_key(
&global_state.settings.language.selected_language, &global_state.settings.language.selected_language,
)) ))
.unwrap_or_else(|error| { .unwrap_or_else(|e| {
let preferred_language = &global_state.settings.language.selected_language;
warn!( warn!(
"Impossible to load {} language: change to the default language (English) instead. \ ?e,
Source error: {:?}", ?preferred_language,
&global_state.settings.language.selected_language, error "Impossible to load language: change to the default language (English) instead.",
); );
global_state.settings.language.selected_language = i18n::REFERENCE_LANG.to_owned(); global_state.settings.language.selected_language = i18n::REFERENCE_LANG.to_owned();
load_expect::<VoxygenLocalization>(&i18n_asset_key( load_expect::<VoxygenLocalization>(&i18n_asset_key(
@ -161,9 +161,10 @@ fn main() {
// Set up the initial play state. // Set up the initial play state.
let mut states: Vec<Box<dyn PlayState>> = vec![Box::new(MainMenuState::new(&mut global_state))]; let mut states: Vec<Box<dyn PlayState>> = vec![Box::new(MainMenuState::new(&mut global_state))];
states states.last().map(|current_state| {
.last() let current_state = current_state.name();
.map(|current_state| debug!("Started game with state '{}'", current_state.name())); debug!(?current_state, "Started game with state")
});
// What's going on here? // What's going on here?
// --------------------- // ---------------------
@ -185,7 +186,8 @@ fn main() {
debug!("Shutting down all states..."); debug!("Shutting down all states...");
while states.last().is_some() { while states.last().is_some() {
states.pop().map(|old_state| { states.pop().map(|old_state| {
debug!("Popped state '{}'.", old_state.name()); let old_state = old_state.name();
debug!(?old_state, "Popped state");
global_state.on_play_state_changed(); global_state.on_play_state_changed();
}); });
} }
@ -193,7 +195,8 @@ fn main() {
PlayStateResult::Pop => { PlayStateResult::Pop => {
direction = Direction::Backwards; direction = Direction::Backwards;
states.pop().map(|old_state| { states.pop().map(|old_state| {
debug!("Popped state '{}'.", old_state.name()); let old_state = old_state.name();
debug!(?old_state, "Popped state");
global_state.on_play_state_changed(); global_state.on_play_state_changed();
}); });
}, },
@ -203,15 +206,13 @@ fn main() {
states.push(new_state); states.push(new_state);
global_state.on_play_state_changed(); global_state.on_play_state_changed();
}, },
PlayStateResult::Switch(mut new_state) => { PlayStateResult::Switch(mut new_state_box) => {
direction = Direction::Forwards; direction = Direction::Forwards;
states.last_mut().map(|old_state| { states.last_mut().map(|old_state_box| {
debug!( let old_state = old_state_box.name();
"Switching to state '{}' from state '{}'.", let new_state = new_state_box.name();
new_state.name(), debug!(?old_state, ?new_state, "Switching to states",);
old_state.name() mem::swap(old_state_box, &mut new_state_box);
);
mem::swap(old_state, &mut new_state);
global_state.on_play_state_changed(); global_state.on_play_state_changed();
}); });
}, },


@ -156,14 +156,14 @@ impl PlayState for CharSelectionState {
let localized_strings = assets::load_expect::<VoxygenLocalization>(&i18n_asset_key( let localized_strings = assets::load_expect::<VoxygenLocalization>(&i18n_asset_key(
&global_state.settings.language.selected_language, &global_state.settings.language.selected_language,
)); ));
if let Err(err) = self.client.borrow_mut().tick( if let Err(e) = self.client.borrow_mut().tick(
comp::ControllerInputs::default(), comp::ControllerInputs::default(),
clock.get_last_delta(), clock.get_last_delta(),
|_| {}, |_| {},
) { ) {
global_state.info_message = global_state.info_message =
Some(localized_strings.get("common.connection_lost").to_owned()); Some(localized_strings.get("common.connection_lost").to_owned());
error!("[char_selection] Failed to tick the scene: {:?}", err); error!(?e, "[char_selection] Failed to tick the scene");
return PlayStateResult::Pop; return PlayStateResult::Pop;
} }


@ -75,10 +75,10 @@ impl PlayState for MainMenuState {
std::rc::Rc::new(std::cell::RefCell::new(client)), std::rc::Rc::new(std::cell::RefCell::new(client)),
))); )));
}, },
Some(InitMsg::Done(Err(err))) => { Some(InitMsg::Done(Err(e))) => {
client_init = None; client_init = None;
global_state.info_message = Some({ global_state.info_message = Some({
let err = match err { let err = match e {
InitError::BadAddress(_) | InitError::NoAddress => { InitError::BadAddress(_) | InitError::NoAddress => {
localized_strings.get("main.login.server_not_found").into() localized_strings.get("main.login.server_not_found").into()
}, },
@ -262,8 +262,8 @@ fn attempt_login(
if !net_settings.servers.contains(&server_address) { if !net_settings.servers.contains(&server_address) {
net_settings.servers.push(server_address.clone()); net_settings.servers.push(server_address.clone());
} }
if let Err(err) = global_state.settings.save_to_file() { if let Err(e) = global_state.settings.save_to_file() {
warn!("Failed to save settings: {:?}", err); warn!(?e, "Failed to save settings");
} }
if comp::Player::alias_is_valid(&username) { if comp::Player::alias_is_valid(&username) {


@ -47,13 +47,13 @@ impl Meta {
} }
}, },
Err(e) => { Err(e) => {
warn!("Failed to parse meta file! Fallback to default. {}", e); warn!(?e, ?file, "Failed to parse meta file! Fallback to default");
// Rename the corrupted settings file // Rename the corrupted settings file
let mut new_path = path.to_owned(); let mut new_path = path.to_owned();
new_path.pop(); new_path.pop();
new_path.push("meta.invalid.ron"); new_path.push("meta.invalid.ron");
if let Err(err) = std::fs::rename(path, new_path) { if let Err(e) = std::fs::rename(path.clone(), new_path.clone()) {
warn!("Failed to rename meta file. {}", err); warn!(?e, ?path, ?new_path, "Failed to rename meta file");
} }
}, },
} }
@ -68,7 +68,7 @@ impl Meta {
pub fn save_to_file_warn(&self) { pub fn save_to_file_warn(&self) {
if let Err(err) = self.save_to_file() { if let Err(e) = self.save_to_file() {
warn!("Failed to save settings: {:?}", err); warn!(?e, "Failed to save settings");
} }
} }
@ -85,12 +85,12 @@ impl Meta {
} }
pub fn get_meta_path() -> PathBuf { pub fn get_meta_path() -> PathBuf {
if let Some(val) = std::env::var_os("VOXYGEN_CONFIG") { if let Some(path) = std::env::var_os("VOXYGEN_CONFIG") {
let meta = PathBuf::from(val).join("meta.ron"); let meta = PathBuf::from(path).join("meta.ron");
if meta.exists() || meta.parent().map(|x| x.exists()).unwrap_or(false) { if meta.exists() || meta.parent().map(|x| x.exists()).unwrap_or(false) {
return meta; return meta;
} }
warn!("VOXYGEN_CONFIG points to invalid path."); warn!(?path, "VOXYGEN_CONFIG points to invalid path.");
} }
let proj_dirs = ProjectDirs::from("net", "veloren", "voxygen") let proj_dirs = ProjectDirs::from("net", "veloren", "voxygen")


@ -66,13 +66,14 @@ impl Profile {
Ok(profile) => return profile, Ok(profile) => return profile,
Err(e) => { Err(e) => {
warn!( warn!(
"Failed to parse profile file! Falling back to default. {}", ?e,
e ?path,
"Failed to parse profile file! Falling back to default."
); );
// Rename the corrupted profile file. // Rename the corrupted profile file.
let new_path = path.with_extension("invalid.ron"); let new_path = path.with_extension("invalid.ron");
if let Err(err) = std::fs::rename(path, new_path) { if let Err(e) = std::fs::rename(path.clone(), new_path.clone()) {
warn!("Failed to rename profile file. {}", err); warn!(?e, ?path, ?new_path, "Failed to rename profile file.");
} }
}, },
} }
@ -87,8 +88,8 @@ impl Profile {
/// Save the current profile to disk, warn on failure. /// Save the current profile to disk, warn on failure.
pub fn save_to_file_warn(&self) { pub fn save_to_file_warn(&self) {
if let Err(err) = self.save_to_file() { if let Err(e) = self.save_to_file() {
warn!("Failed to save profile: {:?}", err); warn!(?e, "Failed to save profile");
} }
} }
@ -156,12 +157,12 @@ impl Profile {
} }
fn get_path() -> PathBuf { fn get_path() -> PathBuf {
if let Some(val) = std::env::var_os("VOXYGEN_CONFIG") { if let Some(path) = std::env::var_os("VOXYGEN_CONFIG") {
let profile = PathBuf::from(val).join("profile.ron"); let profile = PathBuf::from(path.clone()).join("profile.ron");
if profile.exists() || profile.parent().map(|x| x.exists()).unwrap_or(false) { if profile.exists() || profile.parent().map(|x| x.exists()).unwrap_or(false) {
return profile; return profile;
} }
warn!("VOXYGEN_CONFIG points to invalid path."); warn!(?path, "VOXYGEN_CONFIG points to invalid path.");
} }
let proj_dirs = ProjectDirs::from("net", "veloren", "voxygen") let proj_dirs = ProjectDirs::from("net", "veloren", "voxygen")


@ -354,10 +354,7 @@ impl Renderer {
self.postprocess_pipeline = postprocess_pipeline; self.postprocess_pipeline = postprocess_pipeline;
self.player_shadow_pipeline = player_shadow_pipeline; self.player_shadow_pipeline = player_shadow_pipeline;
}, },
Err(e) => error!( Err(e) => error!(?e, "Could not recreate shaders from assets due to an error",),
"Could not recreate shaders from assets due to an error: {:#?}",
e
),
} }
} }


@ -38,7 +38,7 @@ fn graceful_load_vox(mesh_name: &str) -> Arc<DotVoxData> {
match assets::load::<DotVoxData>(full_specifier.as_str()) { match assets::load::<DotVoxData>(full_specifier.as_str()) {
Ok(dot_vox) => dot_vox, Ok(dot_vox) => dot_vox,
Err(_) => { Err(_) => {
error!("Could not load vox file for figure: {}", full_specifier); error!(?full_specifier, "Could not load vox file for figure");
assets::load_expect::<DotVoxData>("voxygen.voxel.not_found") assets::load_expect::<DotVoxData>("voxygen.voxel.not_found")
}, },
} }
@ -157,8 +157,9 @@ impl HumHeadSpec {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!( error!(
"No head specification exists for the combination of {:?} and {:?}", ?body.species,
body.species, body.body_type ?body.body_type,
"No head specification exists for the combination of species and body"
); );
return load_mesh("not_found", Vec3::new(-5.0, -5.0, -2.5), generate_mesh); return load_mesh("not_found", Vec3::new(-5.0, -5.0, -2.5), generate_mesh);
}, },
@ -368,7 +369,7 @@ impl HumArmorShoulderSpec {
match self.0.map.get(&shoulder) { match self.0.map.get(&shoulder) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No shoulder specification exists for {:?}", shoulder); error!(?shoulder, "No shoulder specification exists");
return load_mesh("not_found", Vec3::new(-3.0, -3.5, 0.1), generate_mesh); return load_mesh("not_found", Vec3::new(-3.0, -3.5, 0.1), generate_mesh);
}, },
} }
@ -450,7 +451,7 @@ impl HumArmorChestSpec {
match self.0.map.get(&chest) { match self.0.map.get(&chest) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No chest specification exists for {:?}", loadout.chest); error!(?loadout.chest, "No chest specification exists");
return load_mesh("not_found", Vec3::new(-7.0, -3.5, 2.0), generate_mesh); return load_mesh("not_found", Vec3::new(-7.0, -3.5, 2.0), generate_mesh);
}, },
} }
@ -507,7 +508,7 @@ impl HumArmorHandSpec {
match self.0.map.get(&hand) { match self.0.map.get(&hand) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No hand specification exists for {:?}", hand); error!(?hand, "No hand specification exists");
return load_mesh("not_found", Vec3::new(-1.5, -1.5, -7.0), generate_mesh); return load_mesh("not_found", Vec3::new(-1.5, -1.5, -7.0), generate_mesh);
}, },
} }
@ -583,7 +584,7 @@ impl HumArmorBeltSpec {
match self.0.map.get(&belt) { match self.0.map.get(&belt) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No belt specification exists for {:?}", belt); error!(?belt, "No belt specification exists");
return load_mesh("not_found", Vec3::new(-4.0, -3.5, 2.0), generate_mesh); return load_mesh("not_found", Vec3::new(-4.0, -3.5, 2.0), generate_mesh);
}, },
} }
@ -627,7 +628,7 @@ impl HumArmorBackSpec {
match self.0.map.get(&back) { match self.0.map.get(&back) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No back specification exists for {:?}", back); error!(?back, "No back specification exists");
return load_mesh("not_found", Vec3::new(-4.0, -3.5, 2.0), generate_mesh); return load_mesh("not_found", Vec3::new(-4.0, -3.5, 2.0), generate_mesh);
}, },
} }
@ -670,7 +671,7 @@ impl HumArmorPantsSpec {
match self.0.map.get(&pants) { match self.0.map.get(&pants) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No pants specification exists for {:?}", pants); error!(?pants, "No pants specification exists");
return load_mesh("not_found", Vec3::new(-5.0, -3.5, 1.0), generate_mesh); return load_mesh("not_found", Vec3::new(-5.0, -3.5, 1.0), generate_mesh);
}, },
} }
@ -727,7 +728,7 @@ impl HumArmorFootSpec {
match self.0.map.get(&foot) { match self.0.map.get(&foot) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No foot specification exists for {:?}", foot); error!(?foot, "No foot specification exists");
return load_mesh("not_found", Vec3::new(-2.5, -3.5, -9.0), generate_mesh); return load_mesh("not_found", Vec3::new(-2.5, -3.5, -9.0), generate_mesh);
}, },
} }
@ -793,7 +794,7 @@ impl HumMainWeaponSpec {
let spec = match self.0.get(tool_kind) { let spec = match self.0.get(tool_kind) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No tool/weapon specification exists for {:?}", tool_kind); error!(?tool_kind, "No tool/weapon specification exists");
return load_mesh("not_found", Vec3::new(-1.5, -1.5, -7.0), generate_mesh); return load_mesh("not_found", Vec3::new(-1.5, -1.5, -7.0), generate_mesh);
}, },
}; };
@ -820,7 +821,7 @@ impl HumArmorLanternSpec {
match self.0.map.get(&kind) { match self.0.map.get(&kind) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No lantern specification exists for {:?}", kind); error!(?kind, "No lantern specification exists");
return load_mesh("not_found", Vec3::new(-4.0, -3.5, 2.0), generate_mesh); return load_mesh("not_found", Vec3::new(-4.0, -3.5, 2.0), generate_mesh);
}, },
} }
@ -863,7 +864,7 @@ impl HumArmorHeadSpec {
match self.0.map.get(&head) { match self.0.map.get(&head) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No head specification exists for {:?}", head); error!(?head, "No head specification exists");
return load_mesh("not_found", Vec3::new(-5.0, -3.5, 1.0), generate_mesh); return load_mesh("not_found", Vec3::new(-5.0, -3.5, 1.0), generate_mesh);
}, },
} }
@ -918,7 +919,7 @@ impl HumArmorTabardSpec {
match self.0.map.get(&tabard) { match self.0.map.get(&tabard) {
Some(spec) => spec, Some(spec) => spec,
None => { None => {
error!("No tabard specification exists for {:?}", tabard); error!(?tabard, "No tabard specification exists");
return load_mesh("not_found", Vec3::new(-5.0, -3.5, 1.0), generate_mesh); return load_mesh("not_found", Vec3::new(-5.0, -3.5, 1.0), generate_mesh);
}, },
} }


@ -169,7 +169,7 @@ impl Scene {
} = self.camera.dependents(); } = self.camera.dependents();
const VD: f32 = 115.0; // View Distance const VD: f32 = 115.0; // View Distance
const TIME: f64 = 43200.0; // 12 hours*3600 seconds const TIME: f64 = 43200.0; // 12 hours*3600 seconds
if let Err(err) = renderer.update_consts(&mut self.globals, &[Globals::new( if let Err(e) = renderer.update_consts(&mut self.globals, &[Globals::new(
view_mat, view_mat,
proj_mat, proj_mat,
cam_pos, cam_pos,
@ -186,7 +186,7 @@ impl Scene {
self.camera.get_mode(), self.camera.get_mode(),
250.0, 250.0,
)]) { )]) {
error!("Renderer failed to update: {:?}", err); error!(?e, "Renderer failed to update");
} }
self.figure_model_cache.clean(scene_data.tick); self.figure_model_cache.clean(scene_data.tick);


@ -718,13 +718,13 @@ impl Settings {
match ron::de::from_reader(file) { match ron::de::from_reader(file) {
Ok(s) => return s, Ok(s) => return s,
Err(e) => { Err(e) => {
warn!("Failed to parse setting file! Fallback to default. {}", e); warn!(?e, "Failed to parse setting file! Fallback to default.");
// Rename the corrupted settings file // Rename the corrupted settings file
let mut new_path = path.to_owned(); let mut new_path = path.to_owned();
new_path.pop(); new_path.pop();
new_path.push("settings.invalid.ron"); new_path.push("settings.invalid.ron");
if let Err(err) = std::fs::rename(path, new_path) { if let Err(e) = std::fs::rename(path.clone(), new_path.clone()) {
warn!("Failed to rename settings file. {}", err); warn!(?e, ?path, ?new_path, "Failed to rename settings file.");
} }
}, },
} }
@ -738,8 +738,8 @@ impl Settings {
} }
pub fn save_to_file_warn(&self) { pub fn save_to_file_warn(&self) {
if let Err(err) = self.save_to_file() { if let Err(e) = self.save_to_file() {
warn!("Failed to save settings: {:?}", err); warn!(?e, "Failed to save settings");
} }
} }
@ -756,12 +756,12 @@ impl Settings {
} }
pub fn get_settings_path() -> PathBuf { pub fn get_settings_path() -> PathBuf {
if let Some(val) = std::env::var_os("VOXYGEN_CONFIG") { if let Some(path) = std::env::var_os("VOXYGEN_CONFIG") {
let settings = PathBuf::from(val).join("settings.ron"); let settings = PathBuf::from(path.clone()).join("settings.ron");
if settings.exists() || settings.parent().map(|x| x.exists()).unwrap_or(false) { if settings.exists() || settings.parent().map(|x| x.exists()).unwrap_or(false) {
return settings; return settings;
} }
warn!("VOXYGEN_CONFIG points to invalid path."); warn!(?path, "VOXYGEN_CONFIG points to invalid path.");
} }
let proj_dirs = ProjectDirs::from("net", "veloren", "voxygen") let proj_dirs = ProjectDirs::from("net", "veloren", "voxygen")
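tracing offers two capture sigils: `?e` formats through Debug and `%e` through Display; the changes here consistently use Debug. A minimal sketch of the difference, using a hypothetical std::io::Error in place of the real parse error (the rendered values in the comments are illustrative):

    use std::io;
    use tracing::warn;

    fn report_parse_failure(e: &io::Error) {
        // Debug capture, e.g. e=Os { code: 2, kind: NotFound, .. }
        warn!(?e, "Failed to parse settings file");
        // Display capture, e.g. e=No such file or directory (os error 2)
        warn!(%e, "Failed to parse settings file");
    }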

View File

@ -43,7 +43,7 @@ const ATLAS_CUTTOFF_FRAC: f32 = 0.2;
/// Multiplied by current window size /// Multiplied by current window size
const GRAPHIC_CACHE_RELATIVE_SIZE: u16 = 1; const GRAPHIC_CACHE_RELATIVE_SIZE: u16 = 1;
#[derive(PartialEq, Eq, Hash, Copy, Clone)] #[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
pub struct Id(u32); pub struct Id(u32);
// TODO these can become invalid when clearing the cache // TODO these can become invalid when clearing the cache
@ -300,7 +300,10 @@ fn draw_graphic(graphic_map: &GraphicMap, graphic_id: Id, dims: Vec2<u16>) -> Op
*sample_strat, *sample_strat,
)), )),
None => { None => {
warn!("A graphic was requested via an id which is not in use"); warn!(
?graphic_id,
"A graphic was requested via an id which is not in use"
);
None None
}, },
} }
@ -333,12 +336,12 @@ fn aabr_from_alloc_rect(rect: guillotiere::Rectangle) -> Aabr<u16> {
fn upload_image(renderer: &mut Renderer, aabr: Aabr<u16>, tex: &Texture, image: &RgbaImage) { fn upload_image(renderer: &mut Renderer, aabr: Aabr<u16>, tex: &Texture, image: &RgbaImage) {
let offset = aabr.min.into_array(); let offset = aabr.min.into_array();
let size = aabr.size().into_array(); let size = aabr.size().into_array();
if let Err(err) = renderer.update_texture( if let Err(e) = renderer.update_texture(
tex, tex,
offset, offset,
size, size,
&image.pixels().map(|p| p.0).collect::<Vec<[u8; 4]>>(), &image.pixels().map(|p| p.0).collect::<Vec<[u8; 4]>>(),
) { ) {
warn!("Failed to update texture: {:?}", err); warn!(?e, "Failed to update texture");
} }
} }
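The `Debug` added to `Id`'s derive list above is what makes `?graphic_id` legal, since the `?` shorthand formats through Debug. A self-contained sketch of the same pairing (the function name is illustrative):

    use tracing::warn;

    #[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
    pub struct Id(u32);

    fn report_unused(graphic_id: Id) {
        // Without Debug in the derive list this line would not compile.
        warn!(?graphic_id, "A graphic was requested via an id which is not in use");
    }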

View File

@ -443,11 +443,11 @@ impl Window {
); );
None None
}, },
Err(gilrs::Error::Other(err)) => { Err(gilrs::Error::Other(e)) => {
error!( error!(
"Platform-specific error when creating a Gilrs instance: `{}`. Falling back \ ?e,
to no controller support.", "Platform-specific error when creating a Gilrs instance. Falling back to no \
err controller support."
); );
None None
}, },
@ -962,8 +962,8 @@ impl Window {
use std::time::SystemTime; use std::time::SystemTime;
// Check if folder exists and create it if it does not // Check if folder exists and create it if it does not
if !path.exists() { if !path.exists() {
if let Err(err) = std::fs::create_dir_all(&path) { if let Err(e) = std::fs::create_dir_all(&path) {
warn!("Couldn't create folder for screenshot: {:?}", err); warn!(?e, "Couldn't create folder for screenshot");
let _result = let _result =
sender.send(String::from("Couldn't create folder for screenshot")); sender.send(String::from("Couldn't create folder for screenshot"));
} }
@ -975,8 +975,8 @@ impl Window {
.map(|d| d.as_millis()) .map(|d| d.as_millis())
.unwrap_or(0) .unwrap_or(0)
)); ));
if let Err(err) = img.save(&path) { if let Err(e) = img.save(&path) {
warn!("Couldn't save screenshot: {:?}", err); warn!(?e, "Couldn't save screenshot");
let _result = sender.send(String::from("Couldn't save screenshot")); let _result = sender.send(String::from("Couldn't save screenshot"));
} else { } else {
let _result = let _result =
@ -984,10 +984,7 @@ impl Window {
} }
}); });
}, },
Err(err) => error!( Err(e) => error!(?e, "Couldn't create screenshot due to renderer error"),
"Couldn't create screenshot due to renderer error: {:?}",
err
),
} }
} }

View File

@ -17,7 +17,7 @@ num = "0.2.0"
ordered-float = "1.0" ordered-float = "1.0"
hashbrown = { version = "0.6", features = ["rayon", "serde", "nightly"] } hashbrown = { version = "0.6", features = ["rayon", "serde", "nightly"] }
lazy_static = "1.4.0" lazy_static = "1.4.0"
tracing = { version = "0.1", default-features = false , features = ["attributes"] } tracing = { version = "0.1", default-features = false }
rand = "0.7" rand = "0.7"
rand_chacha = "0.2.1" rand_chacha = "0.2.1"
arr_macro = "0.1.2" arr_macro = "0.1.2"

View File

@ -114,8 +114,8 @@ fn main() {
.expect("Image dimensions must be valid"); .expect("Image dimensions must be valid");
let mut path = PathBuf::from("./screenshots"); let mut path = PathBuf::from("./screenshots");
if !path.exists() { if !path.exists() {
if let Err(err) = std::fs::create_dir(&path) { if let Err(e) = std::fs::create_dir(&path) {
warn!("Couldn't create folder for screenshot: {:?}", err); warn!(?e, ?path, "Couldn't create folder for screenshot");
} }
} }
path.push(format!( path.push(format!(
@ -125,8 +125,8 @@ fn main() {
.map(|d| d.as_millis()) .map(|d| d.as_millis())
.unwrap_or(0) .unwrap_or(0)
)); ));
if let Err(err) = world_map.save(&path) { if let Err(e) = world_map.save(&path) {
warn!("Couldn't save screenshot: {:?}", err); warn!(?e, ?path, "Couldn't save screenshot");
} }
} }
} }
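Structured fields like `?e` and `?path` only appear in output once the binary installs a subscriber. A minimal sketch, assuming the `tracing-subscriber` crate's fmt builder; the sample output line in the comment is illustrative, not captured from a real run:

    fn main() {
        // The fmt subscriber prints level, target and message, then each recorded
        // field as key=value, roughly:
        //   WARN veloren_world: Couldn't save screenshot e=... path="./screenshots/1234.png"
        tracing_subscriber::fmt().init();
    }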

View File

@ -187,7 +187,7 @@ impl Civs {
.get_mut(pos) .get_mut(pos)
.map(|chunk| chunk.sites.push(world_site.clone())); .map(|chunk| chunk.sites.push(world_site.clone()));
} }
info!("Placed site at {:?}", site.center); info!(?site.center, "Placed site at location");
} }
//this.display_info(); //this.display_info();
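The shorthand also accepts field accesses: `?site.center` produces a field literally named `site.center`. A minimal sketch with a hypothetical `Site` type standing in for the real one:

    use tracing::info;

    #[derive(Debug)]
    struct Site { center: (i32, i32) } // hypothetical stand-in for the real site type

    fn announce(site: &Site) {
        // Emits a field named `site.center` carrying the Debug form of the value.
        info!(?site.center, "Placed site");
    }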

View File

@ -214,7 +214,7 @@ impl<'a> Sampler<'a> for ColumnGen<'a> {
} else { } else {
match kind { match kind {
RiverKind::River { .. } => { RiverKind::River { .. } => {
error!("What? River: {:?}, Pos: {:?}", river, posj); error!(?river, ?posj, "What?");
panic!("How can a river have no downhill?"); panic!("How can a river have no downhill?");
}, },
RiverKind::Lake { .. } => { RiverKind::Lake { .. } => {
@ -619,8 +619,10 @@ impl<'a> Sampler<'a> for ColumnGen<'a> {
dist dist
} else { } else {
error!( error!(
"Ocean: {:?} Here: {:?}, Ocean: {:?}", ?max_border_river,
max_border_river, chunk_pos, max_border_river_pos ?chunk_pos,
?max_border_river_pos,
"downhill error details"
); );
panic!( panic!(
"Oceans should definitely have a downhill! \ "Oceans should definitely have a downhill! \

View File

@ -724,7 +724,7 @@ fn erode(
// NOTE: The value being divided by here sets the effective maximum uplift rate, // NOTE: The value being divided by here sets the effective maximum uplift rate,
// as everything is scaled to it! // as everything is scaled to it!
let dt = max_uplift as f64 / 1e-3; let dt = max_uplift as f64 / 1e-3;
debug!("dt={:?}", dt); debug!(?dt, "");
// Minimum sediment thickness before we treat erosion as sediment based. // Minimum sediment thickness before we treat erosion as sediment based.
let sediment_thickness = |_n| /*6.25e-5*/1.0e-4 * dt; let sediment_thickness = |_n| /*6.25e-5*/1.0e-4 * dt;
let neighbor_coef = TerrainChunkSize::RECT_SIZE.map(|e| e as f64); let neighbor_coef = TerrainChunkSize::RECT_SIZE.map(|e| e as f64);
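tracing events can also carry fields with no message at all, so a value like `dt` can be recorded without inventing message text. A minimal sketch with an illustrative parameter:

    use tracing::debug;

    fn log_timestep(max_uplift: f32) {
        // Scaled to the effective maximum uplift rate, as in erode().
        let dt = max_uplift as f64 / 1e-3;
        // The event consists of the `dt` field alone; no message string is required.
        debug!(?dt);
    }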

View File

@ -877,8 +877,8 @@ impl WorldSim {
FileOpts::LoadLegacy(ref path) => { FileOpts::LoadLegacy(ref path) => {
let file = match File::open(path) { let file = match File::open(path) {
Ok(file) => file, Ok(file) => file,
Err(err) => { Err(e) => {
warn!("Couldn't read path for maps: {:?}", err); warn!(?e, ?path, "Couldn't read path for maps");
return None; return None;
}, },
}; };
@ -886,11 +886,11 @@ impl WorldSim {
let reader = BufReader::new(file); let reader = BufReader::new(file);
let map: WorldFileLegacy = match bincode::deserialize_from(reader) { let map: WorldFileLegacy = match bincode::deserialize_from(reader) {
Ok(map) => map, Ok(map) => map,
Err(err) => { Err(e) => {
warn!( warn!(
"Couldn't parse legacy map: {:?}). Maybe you meant to try a \ ?e,
regular load?", "Couldn't parse legacy map. Maybe you meant to try a regular \
err load?"
); );
return None; return None;
}, },
@ -901,8 +901,8 @@ impl WorldSim {
FileOpts::Load(ref path) => { FileOpts::Load(ref path) => {
let file = match File::open(path) { let file = match File::open(path) {
Ok(file) => file, Ok(file) => file,
Err(err) => { Err(e) => {
warn!("Couldn't read path for maps: {:?}", err); warn!(?e, ?path, "Couldn't read path for maps");
return None; return None;
}, },
}; };
@ -910,11 +910,10 @@ impl WorldSim {
let reader = BufReader::new(file); let reader = BufReader::new(file);
let map: WorldFile = match bincode::deserialize_from(reader) { let map: WorldFile = match bincode::deserialize_from(reader) {
Ok(map) => map, Ok(map) => map,
Err(err) => { Err(e) => {
warn!( warn!(
"Couldn't parse modern map: {:?}). Maybe you meant to try a \ ?e,
legacy load?", "Couldn't parse modern map. Maybe you meant to try a legacy load?"
err
); );
return None; return None;
}, },
@ -925,22 +924,18 @@ impl WorldSim {
FileOpts::LoadAsset(ref specifier) => { FileOpts::LoadAsset(ref specifier) => {
let reader = match assets::load_file(specifier, &["bin"]) { let reader = match assets::load_file(specifier, &["bin"]) {
Ok(reader) => reader, Ok(reader) => reader,
Err(err) => { Err(e) => {
warn!( warn!(?e, ?specifier, "Couldn't read asset specifier for maps");
"Couldn't read asset specifier {:?} for maps: {:?}",
specifier, err
);
return None; return None;
}, },
}; };
let map: WorldFile = match bincode::deserialize_from(reader) { let map: WorldFile = match bincode::deserialize_from(reader) {
Ok(map) => map, Ok(map) => map,
Err(err) => { Err(e) => {
warn!( warn!(
"Couldn't parse modern map: {:?}). Maybe you meant to try a \ ?e,
legacy load?", "Couldn't parse modern map. Maybe you meant to try a legacy load?"
err
); );
return None; return None;
}, },
@ -1026,8 +1021,8 @@ impl WorldSim {
// Check if folder exists and create it if it does not // Check if folder exists and create it if it does not
let mut path = PathBuf::from("./maps"); let mut path = PathBuf::from("./maps");
if !path.exists() { if !path.exists() {
if let Err(err) = std::fs::create_dir(&path) { if let Err(e) = std::fs::create_dir(&path) {
warn!("Couldn't create folder for map: {:?}", err); warn!(?e, ?path, "Couldn't create folder for map");
return; return;
} }
} }
@ -1039,17 +1034,17 @@ impl WorldSim {
.map(|d| d.as_millis()) .map(|d| d.as_millis())
.unwrap_or(0) .unwrap_or(0)
)); ));
let file = match File::create(path) { let file = match File::create(path.clone()) {
Ok(file) => file, Ok(file) => file,
Err(err) => { Err(e) => {
warn!("Couldn't create file for maps: {:?}", err); warn!(?e, ?path, "Couldn't create file for maps");
return; return;
}, },
}; };
let writer = BufWriter::new(file); let writer = BufWriter::new(file);
if let Err(err) = bincode::serialize_into(writer, &map) { if let Err(e) = bincode::serialize_into(writer, &map) {
warn!("Couldn't write map: {:?}", err); warn!(?e, "Couldn't write map");
} }
} }
})(); })();
@ -1088,7 +1083,7 @@ impl WorldSim {
let is_ocean_fn = |posi: usize| is_ocean[posi]; let is_ocean_fn = |posi: usize| is_ocean[posi];
let mut dh = downhill(|posi| alt[posi], is_ocean_fn); let mut dh = downhill(|posi| alt[posi], is_ocean_fn);
let (boundary_len, indirection, water_alt_pos, maxh) = get_lakes(|posi| alt[posi], &mut dh); let (boundary_len, indirection, water_alt_pos, maxh) = get_lakes(|posi| alt[posi], &mut dh);
debug!("Max height: {:?}", maxh); debug!(?maxh, "Max height");
let (mrec, mstack, mwrec) = { let (mrec, mstack, mwrec) = {
let mut wh = vec![0.0; WORLD_SIZE.x * WORLD_SIZE.y]; let mut wh = vec![0.0; WORLD_SIZE.x * WORLD_SIZE.y];
get_multi_rec( get_multi_rec(
@ -1911,10 +1906,9 @@ impl SimChunk {
); */ ); */
} }
if river_slope.abs() >= 0.25 && cross_section.x >= 1.0 { if river_slope.abs() >= 0.25 && cross_section.x >= 1.0 {
debug!( let pos_area = wposf;
"Big waterfall! Pos area: {:?}, River data: {:?}, slope: {:?}", let river_data = &river;
wposf, river, river_slope debug!(?pos_area, ?river_data, ?river_slope, "Big waterfall!");
);
} }
}, },
Some(RiverKind::Lake { .. }) => { Some(RiverKind::Lake { .. }) => {