Mirror of https://gitlab.com/veloren/veloren.git
Address further MR 2301 comments.
Commit 42dc86788e (parent 9c4453508e)
@@ -47,8 +47,6 @@ pub struct VoxelMinimap {
     image_id: img_ids::Rotations,
     last_pos: Vec3<i32>,
     last_ceiling: i32,
-    /// Maximum z of the top of the tallest loaded chunk (for ceiling pruning)
-    max_chunk_z: i32,
     keyed_jobs: KeyedJobs<Vec2<i32>, MinimapColumn>,
 }
 
@@ -70,8 +68,7 @@ impl VoxelMinimap {
             composited,
             last_pos: Vec3::zero(),
             last_ceiling: 0,
-            max_chunk_z: 0,
-            keyed_jobs: KeyedJobs::new(),
+            keyed_jobs: KeyedJobs::new("IMAGE_PROCESSING"),
         }
     }
 
@@ -197,16 +194,19 @@ impl VoxelMinimap {
                 }) {
                     self.chunk_minimaps.insert(key, column);
                     new_chunks = true;
-                    self.max_chunk_z = self.max_chunk_z.max(chunk.get_max_z());
                 }
             }
         }
         new_chunks
     }
 
-    fn remove_unloaded_chunks(&mut self, terrain: &TerrainGrid) {
-        self.chunk_minimaps
-            .retain(|key, _| terrain.get_key(*key).is_some());
+    fn remove_chunks_far(&mut self, terrain: &TerrainGrid, cpos: Vec2<i32>) {
+        self.chunk_minimaps.retain(|key, _| {
+            let delta: Vec2<u32> = (key - cpos).map(i32::abs).as_();
+            delta.x < 1 + VOXEL_MINIMAP_SIDELENGTH / TerrainChunkSize::RECT_SIZE.x
+                && delta.y < 1 + VOXEL_MINIMAP_SIDELENGTH / TerrainChunkSize::RECT_SIZE.y
+                && terrain.get_key(*key).is_some()
+        });
     }
 
     pub fn maintain(&mut self, client: &Client, ui: &mut Ui) {
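The new remove_chunks_far above prunes cached minimap columns that are either unloaded or outside the minimap's footprint around the camera chunk cpos. As a rough standalone sketch with assumed constants (the real VOXEL_MINIMAP_SIDELENGTH and TerrainChunkSize::RECT_SIZE are defined elsewhere in the codebase): with 32-block chunks and a 512-block minimap side, the check delta < 1 + 512/32 = 17 keeps chunks up to 16 chunks away on each axis.

// Editorial sketch of the same predicate with assumed constants; not part of the diff.
use vek::Vec2;

const VOXEL_MINIMAP_SIDELENGTH: u32 = 512; // assumed minimap footprint, in blocks
const RECT_SIZE: Vec2<u32> = Vec2 { x: 32, y: 32 }; // assumed chunk size, in blocks

fn keep_chunk(key: Vec2<i32>, cpos: Vec2<i32>, loaded: bool) -> bool {
    // Per-axis distance from the camera chunk, in chunks.
    let delta: Vec2<u32> = (key - cpos).map(i32::abs).as_();
    // Keep chunks within one chunk beyond the minimap footprint, and only
    // while the terrain grid still has them loaded.
    delta.x < 1 + VOXEL_MINIMAP_SIDELENGTH / RECT_SIZE.x
        && delta.y < 1 + VOXEL_MINIMAP_SIDELENGTH / RECT_SIZE.y
        && loaded
}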
@@ -224,7 +224,7 @@ impl VoxelMinimap {
         let pool = client.state().ecs().read_resource::<SlowJobPool>();
         let terrain = client.state().terrain();
         let new_chunks = self.add_chunks_near(&pool, &terrain, cpos);
-        self.remove_unloaded_chunks(&terrain);
+        self.remove_chunks_far(&terrain, cpos);
 
         // ceiling_offset is the distance from the player to a block heuristically
         // detected as the ceiling height (a non-tree solid block above them, or
@@ -256,10 +256,18 @@ impl VoxelMinimap {
                         .map_or(false, |(_, b)| *b)
                 })
                 .unwrap_or_else(|| {
+                    // if the `find` returned None, there's no solid blocks above the
+                    // player within the chunk
                     if above.1 {
+                        // if the `above` block is solid, the chunk has an infinite
+                        // solid ceiling, and so we render from 1 block above the
+                        // player (which is where the player's head is if they're 2
+                        // blocks tall)
                         1
                     } else {
-                        self.max_chunk_z - pos.z as i32
+                        // if the ceiling is a non-solid sky, use the largest value
+                        // (subsequent arithmetic on ceiling_offset must be saturating)
+                        i32::MAX
                     }
                 })
             },
@@ -291,7 +299,7 @@ impl VoxelMinimap {
                     above,
                     below,
                 } = column;
-                if pos.z as i32 + ceiling_offset < *zlo {
+                if (pos.z as i32).saturating_add(ceiling_offset) < *zlo {
                     // If the ceiling is below the bottom of a chunk, color it black,
                     // so that the middles of caves/dungeons don't show the forests
                     // around them.
@@ -303,7 +311,8 @@ impl VoxelMinimap {
                     // differently-tall trees are handled properly)
                     layers
                         .get(
-                            ((pos.z as i32 - zlo + ceiling_offset) as usize)
+                            (((pos.z as i32 - zlo).saturating_add(ceiling_offset))
+                                as usize)
                                 .min(layers.len().saturating_sub(1)),
                         )
                         .and_then(|grid| grid.get(cmod).map(|c| c.0.as_()))
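The two saturating_add changes above exist because ceiling_offset can now be the i32::MAX sentinel: a plain `+` would overflow, which panics in debug builds and wraps in release builds. A small, self-contained illustration (not part of the commit):

fn main() {
    let ceiling_offset = i32::MAX; // sentinel meaning "open sky above"
    let pos_z = 150; // an arbitrary player altitude, for illustration

    // Saturating addition clamps at i32::MAX, so comparisons such as
    // `... < *zlo` remain meaningful.
    assert_eq!(pos_z.saturating_add(ceiling_offset), i32::MAX);

    // Plain `+` would panic in debug builds; in release builds it wraps to a
    // large negative number, which would wrongly pass the `< *zlo` check.
    let (wrapped, overflowed) = pos_z.overflowing_add(ceiling_offset);
    assert!(overflowed && wrapped < 0);
}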
@@ -159,7 +159,7 @@ impl GraphicCache {
             atlases: vec![(atlas, 0)],
             textures: vec![texture],
             cache_map: HashMap::default(),
-            keyed_jobs: KeyedJobs::new(),
+            keyed_jobs: KeyedJobs::new("IMAGE_PROCESSING"),
         }
     }
 
@@ -402,7 +402,7 @@ fn draw_graphic(
     pool: Option<&SlowJobPool>,
 ) -> Option<(RgbaImage, Option<Rgba<f32>>)> {
     match graphic_map.get(&graphic_id) {
-        // Short-circuit spawning a threadpool for blank graphics
+        // Short-circuit spawning a job on the threadpool for blank graphics
         Some(Graphic::Blank) => None,
         Some(inner) => {
            keyed_jobs
voxygen/src/ui/keyed_jobs.rs (new file, 102 lines)
@@ -0,0 +1,102 @@
+use common::slowjob::{SlowJob, SlowJobPool};
+use hashbrown::{hash_map::Entry, HashMap};
+use std::{
+    hash::Hash,
+    time::{Duration, Instant},
+};
+
+enum KeyedJobTask<V> {
+    Pending(Instant, Option<SlowJob>),
+    Completed(Instant, V),
+}
+
+pub struct KeyedJobs<K, V> {
+    tx: crossbeam_channel::Sender<(K, V)>,
+    rx: crossbeam_channel::Receiver<(K, V)>,
+    tasks: HashMap<K, KeyedJobTask<V>>,
+    name: &'static str,
+    last_gc: Instant,
+}
+
+const KEYEDJOBS_GC_INTERVAL: Duration = Duration::from_secs(1);
+
+impl<K: Hash + Eq + Send + Sync + 'static + Clone, V: Send + Sync + 'static> KeyedJobs<K, V> {
+    #[allow(clippy::new_without_default)]
+    pub fn new(name: &'static str) -> Self {
+        let (tx, rx) = crossbeam_channel::unbounded();
+        Self {
+            tx,
+            rx,
+            tasks: HashMap::new(),
+            name,
+            last_gc: Instant::now(),
+        }
+    }
+
+    /// Spawn a task on a specified threadpool. The function is given as a thunk
+    /// so that if work is needed to create captured variables (e.g.
+    /// `Arc::clone`), that only occurs if the task hasn't yet been scheduled.
+    pub fn spawn<F: FnOnce(&K) -> V + Send + Sync + 'static>(
+        &mut self,
+        pool: Option<&SlowJobPool>,
+        k: K,
+        f: impl FnOnce() -> F,
+    ) -> Option<(K, V)> {
+        if let Some(pool) = pool {
+            while let Ok((k2, v)) = self.rx.try_recv() {
+                if k == k2 {
+                    return Some((k, v));
+                } else {
+                    self.tasks
+                        .insert(k2, KeyedJobTask::Completed(Instant::now(), v));
+                }
+            }
+            let now = Instant::now();
+            if now - self.last_gc > KEYEDJOBS_GC_INTERVAL {
+                self.last_gc = now;
+                self.tasks.retain(|_, task| match task {
+                    KeyedJobTask::Completed(at, _) => now - *at < KEYEDJOBS_GC_INTERVAL,
+                    KeyedJobTask::Pending(at, job) => {
+                        let fresh = now - *at < KEYEDJOBS_GC_INTERVAL;
+                        if !fresh {
+                            if let Some(job) = job.take() {
+                                pool.cancel(job)
+                            }
+                        }
+                        fresh
+                    },
+                });
+            }
+            match self.tasks.entry(k.clone()) {
+                Entry::Occupied(e) => {
+                    let mut ret = None;
+                    e.replace_entry_with(|_, v| {
+                        if let KeyedJobTask::Completed(_, v) = v {
+                            ret = Some((k, v));
+                            None
+                        } else {
+                            Some(v)
+                        }
+                    });
+                    ret
+                },
+                Entry::Vacant(e) => {
+                    // TODO: consider adding a limit to the number of submitted jobs based on the
+                    // number of available threads, once SlowJobPool supports a notion of
+                    // approximating that
+                    let tx = self.tx.clone();
+                    let f = f();
+                    let job = pool.spawn(self.name, move || {
+                        let v = f(&k);
+                        let _ = tx.send((k, v));
+                    });
+                    e.insert(KeyedJobTask::Pending(Instant::now(), Some(job)));
+                    None
+                },
+            }
+        } else {
+            let v = f()(&k);
+            Some((k, v))
+        }
+    }
+}
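For orientation, a caller keeps a KeyedJobs in a long-lived struct and calls spawn with the same key each frame: the first call schedules the work on the named SlowJobPool queue, later calls return None while the job is pending, and the call that observes completion returns Some((key, value)), which the caller then stores in its own cache. A hedged sketch of that pattern (the wrapper type and the key/value types are illustrative, not the actual VoxelMinimap or GraphicCache call sites):

use common::slowjob::SlowJobPool;
use vek::Vec2;

use crate::ui::KeyedJobs; // re-exported from voxygen::ui by this commit

// Illustrative caller; the commit's real users are VoxelMinimap and GraphicCache.
struct ColumnCache {
    keyed_jobs: KeyedJobs<Vec2<i32>, Vec<u8>>,
}

impl ColumnCache {
    fn new() -> Self {
        Self {
            // The name must correspond to a job kind configured on the
            // SlowJobPool; the commit uses "IMAGE_PROCESSING" for both UI callers.
            keyed_jobs: KeyedJobs::new("IMAGE_PROCESSING"),
        }
    }

    /// Call once per tick; yields the value once the background job finishes
    /// (or immediately, inline, when no pool is available).
    fn poll(&mut self, pool: Option<&SlowJobPool>, key: Vec2<i32>) -> Option<Vec<u8>> {
        self.keyed_jobs
            .spawn(pool, key, || {
                // The outer thunk runs only if the job isn't already scheduled;
                // the inner closure is the actual work, run on a pool thread.
                move |key: &Vec2<i32>| vec![(key.x ^ key.y) as u8; 64]
            })
            .map(|(_, value)| value)
    }
}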
@@ -8,9 +8,11 @@ pub mod img_ids;
 #[macro_use]
 pub mod fonts;
 pub mod ice;
+pub mod keyed_jobs;
 
 pub use event::Event;
 pub use graphic::{Graphic, Id as GraphicId, Rotation, SampleStrat, Transform};
+pub use keyed_jobs::KeyedJobs;
 pub use scale::{Scale, ScaleMode};
 pub use widgets::{
     image_frame::ImageFrame,
@@ -48,8 +50,8 @@ use conrod_core::{
 };
 use core::{convert::TryInto, f32, f64, ops::Range};
 use graphic::TexId;
-use hashbrown::{hash_map::Entry, HashMap};
-use std::{hash::Hash, time::Duration};
+use hashbrown::hash_map::Entry;
+use std::time::Duration;
 use tracing::{error, warn};
 use vek::*;
 
@@ -1054,73 +1056,3 @@ fn default_scissor(renderer: &Renderer) -> Aabr<u16> {
         },
     }
 }
-
-enum KeyedJobTask<V> {
-    Pending,
-    Completed(V),
-}
-
-pub struct KeyedJobs<K, V> {
-    tx: crossbeam_channel::Sender<(K, V)>,
-    rx: crossbeam_channel::Receiver<(K, V)>,
-    tasks: HashMap<K, KeyedJobTask<V>>,
-}
-
-impl<K: Hash + Eq + Send + Sync + 'static + Clone, V: Send + Sync + 'static> KeyedJobs<K, V> {
-    #[allow(clippy::new_without_default)]
-    pub fn new() -> Self {
-        let (tx, rx) = crossbeam_channel::unbounded();
-        Self {
-            tx,
-            rx,
-            tasks: HashMap::new(),
-        }
-    }
-
-    /// Spawn a task on a specified threadpool. The function is given as a thunk
-    /// so that if work is needed to create captured variables (e.g.
-    /// `Arc::clone`), that only occurs if the task hasn't yet been scheduled.
-    pub fn spawn<F: FnOnce(&K) -> V + Send + Sync + 'static>(
-        &mut self,
-        pool: Option<&SlowJobPool>,
-        k: K,
-        f: impl FnOnce() -> F,
-    ) -> Option<(K, V)> {
-        if let Some(pool) = pool {
-            while let Ok((k2, v)) = self.rx.try_recv() {
-                if k == k2 {
-                    return Some((k, v));
-                } else {
-                    self.tasks.insert(k2, KeyedJobTask::Completed(v));
-                }
-            }
-            match self.tasks.entry(k.clone()) {
-                Entry::Occupied(e) => {
-                    let mut ret = None;
-                    e.replace_entry_with(|_, v| {
-                        if let KeyedJobTask::Completed(v) = v {
-                            ret = Some((k, v));
-                            None
-                        } else {
-                            Some(v)
-                        }
-                    });
-                    ret
-                },
-                Entry::Vacant(e) => {
-                    let tx = self.tx.clone();
-                    let f = f();
-                    pool.spawn("IMAGE_PROCESSING", move || {
-                        let v = f(&k);
-                        let _ = tx.send((k, v));
-                    });
-                    e.insert(KeyedJobTask::Pending);
-                    None
-                },
-            }
-        } else {
-            let v = f()(&k);
-            Some((k, v))
-        }
-    }
-}