Merge branch 'sharp/improve-chunk-memory' into 'master'

Improve chunk space consumption by a lot.

See merge request veloren/veloren!1415
Joshua Yanovski 2020-09-28 19:08:11 +00:00
commit c0e32dd979
6 changed files with 119 additions and 9 deletions

View File

@@ -89,7 +89,7 @@ impl<'a> TryFrom<&'a str> for BlockKind {
fn try_from(s: &'a str) -> Result<Self, Self::Error> { BLOCK_KINDS.get(s).copied().ok_or(()) }
}
#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub struct Block {
kind: BlockKind,
attr: [u8; 3],
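For context, not part of this commit: the new Eq and Hash derives are what let Block serve as a hash-map key in the defragmentation pass further down, which buckets voxel groups by value. A minimal sketch of that bucketing pattern, using the standard-library HashMap (the real code uses hashbrown) and a hypothetical bucket_by_value helper:

use core::hash::Hash;
use std::collections::HashMap;

// Sketch only: group slice indices by their (Eq + Hash) value, as the
// defragmentation pass does for uniform voxel groups.
fn bucket_by_value<V: Eq + Hash>(values: &[V]) -> HashMap<&V, Vec<usize>> {
    let mut map: HashMap<&V, Vec<usize>> = HashMap::new();
    for (idx, value) in values.iter().enumerate() {
        map.entry(value).or_insert_with(Vec::new).push(idx);
    }
    map
}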

View File

@@ -5,8 +5,8 @@ use crate::{
},
volumes::chunk::{Chunk, ChunkError, ChunkPosIter, ChunkVolIter},
};
use core::{hash::Hash, marker::PhantomData};
use serde::{Deserialize, Serialize};
use std::marker::PhantomData;
use vek::*;
#[derive(Debug)]
@@ -65,6 +65,10 @@ impl<V, S: RectVolSize, M: Clone> Chonk<V, S, M> {
pub fn sub_chunks_len(&self) -> usize { self.sub_chunks.len() }
pub fn sub_chunk_groups(&self) -> usize {
self.sub_chunks.iter().map(SubChunk::num_groups).sum()
}
// Returns the index (in self.sub_chunks) of the SubChunk that contains
// layer z; note that this index changes when more SubChunks are prepended
fn sub_chunk_idx(&self, z: i32) -> i32 {
@@ -80,6 +84,14 @@ impl<V, S: RectVolSize, M: Clone> Chonk<V, S, M> {
// Returns the z offset of the sub_chunk that contains layer z
fn sub_chunk_min_z(&self, z: i32) -> i32 { z - self.sub_chunk_z(z) }
/// Compress chunk by using more intelligent defaults.
pub fn defragment(&mut self)
where
V: Clone + Eq + Hash,
{
self.sub_chunks.iter_mut().for_each(SubChunk::defragment);
}
}
impl<V, S: RectVolSize, M: Clone> BaseVol for Chonk<V, S, M> {
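A note on the two new Chonk methods above, not part of this commit: defragment simply defragments every SubChunk in turn, and sub_chunk_groups sums the per-sub-chunk group counts that the new server metric reports. A hedged sketch of how a caller might combine them, assuming Chonk and RectVolSize from this crate are in scope (the helper name is hypothetical):

// Sketch only: defragment a chonk and report how many groups remain backed
// by real storage, which is what the new chunk_groups_count metric aggregates.
fn defragment_and_count<V, S, M>(chonk: &mut Chonk<V, S, M>) -> usize
where
    V: Clone + Eq + core::hash::Hash,
    S: RectVolSize,
    M: Clone,
{
    chonk.defragment();       // re-picks each sub-chunk's implicit default
    chonk.sub_chunk_groups()  // groups still allocated after compaction
}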

View File

@@ -1,8 +1,9 @@
use crate::vol::{
BaseVol, IntoPosIterator, IntoVolIterator, RasterableVol, ReadVol, VolSize, WriteVol,
};
use core::{hash::Hash, iter::Iterator, marker::PhantomData, mem};
use hashbrown::HashMap;
use serde::{Deserialize, Serialize};
use std::{iter::Iterator, marker::PhantomData};
use vek::*;
#[derive(Debug)]
@@ -56,7 +57,7 @@ pub struct Chunk<V, S: VolSize, M> {
}
impl<V, S: VolSize, M> Chunk<V, S, M> {
const GROUP_COUNT: Vec3<u32> = Vec3::new(
pub const GROUP_COUNT: Vec3<u32> = Vec3::new(
S::SIZE.x / Self::GROUP_SIZE.x,
S::SIZE.y / Self::GROUP_SIZE.y,
S::SIZE.z / Self::GROUP_SIZE.z,
@@ -115,12 +116,94 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
}
}
/// Compress this subchunk by frequency.
pub fn defragment(&mut self)
where
V: Clone + Eq + Hash,
{
// First, construct a HashMap with max capacity equal to GROUP_COUNT_TOTAL (since
// each filled group can contribute at most one entry).
let mut map = HashMap::with_capacity(Self::GROUP_COUNT_TOTAL as usize);
let vox = &self.vox;
let default = &self.default;
self.indices
.iter()
.enumerate()
.for_each(|(grp_idx, &base)| {
let start = usize::from(base) * Self::GROUP_VOLUME as usize;
let end = start + Self::GROUP_VOLUME as usize;
if let Some(group) = vox.get(start..end) {
// Check to see if all blocks in this group are the same.
let mut group = group.iter();
let first = group.next().expect("GROUP_VOLUME ≥ 1");
if group.all(|block| block == first) {
// All blocks in the group were the same, so add our position to this entry
// in the HashMap.
map.entry(first).or_insert(vec![]).push(grp_idx);
}
} else {
// This slot is empty (i.e. has the default value).
map.entry(default).or_insert(vec![]).push(grp_idx);
}
});
// Now, find the block with max frequency in the HashMap and make that our new
// default.
let (new_default, default_groups) = if let Some((new_default, default_groups)) = map
.into_iter()
.max_by_key(|(_, default_groups)| default_groups.len())
{
(new_default.clone(), default_groups)
} else {
// There is no good choice for default group, so leave it as is.
return;
};
// For simplicity, we construct a completely new voxel array rather than
// attempting in-place updates (TODO: consider changing this).
let mut new_vox =
Vec::with_capacity(Self::GROUP_COUNT_TOTAL as usize - default_groups.len());
let num_groups = self.num_groups();
self.indices
.iter_mut()
.enumerate()
.for_each(|(grp_idx, base)| {
if default_groups.contains(&grp_idx) {
// Default groups become 255
*base = 255;
} else {
// Other groups are allocated in increasing order by group index.
// NOTE: Cannot overflow since the current implicit group index can't be at the
// end of the vector until at the earliest after the 256th iteration.
let old_base = usize::from(mem::replace(
base,
(new_vox.len() / Self::GROUP_VOLUME as usize) as u8,
));
if old_base >= num_groups {
// Old default, which (since we reached this branch) is not equal to the new
// default, so we have to write out the old default.
new_vox
.resize(new_vox.len() + Self::GROUP_VOLUME as usize, default.clone());
} else {
let start = old_base * Self::GROUP_VOLUME as usize;
let end = start + Self::GROUP_VOLUME as usize;
new_vox.extend_from_slice(&vox[start..end]);
}
}
});
// Finally, reset our vox and default values to the new ones.
self.vox = new_vox;
self.default = new_default;
}
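In short, defragment finds the voxel value that covers the most fully uniform 4×4×4 groups, makes it the sub-chunk's new implicit default, and rebuilds the voxel array without those groups. A simplified, self-contained sketch of the same idea, not part of this commit, operating on plain byte slices with the standard-library HashMap (the real code uses hashbrown and the chunk's packed storage):

use std::collections::HashMap;

// Sketch only: pick the value that covers the most uniform groups; making it
// the implicit default lets all of those groups be dropped from storage.
fn best_default(groups: &[&[u8]]) -> Option<u8> {
    let mut freq: HashMap<u8, usize> = HashMap::new();
    for group in groups {
        if let Some((&first, rest)) = group.split_first() {
            // Only fully uniform groups can be replaced by an implicit default.
            if rest.iter().all(|&v| v == first) {
                *freq.entry(first).or_insert(0) += 1;
            }
        }
    }
    freq.into_iter()
        .max_by_key(|&(_, count)| count)
        .map(|(value, _)| value)
}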
/// Get a reference to the internal metadata.
pub fn metadata(&self) -> &M { &self.meta }
/// Get a mutable reference to the internal metadata.
pub fn metadata_mut(&mut self) -> &mut M { &mut self.meta }
pub fn num_groups(&self) -> usize { self.vox.len() / Self::GROUP_VOLUME as usize }
#[inline(always)]
fn grp_idx(pos: Vec3<i32>) -> u32 {
let grp_pos = pos.map2(Self::GROUP_SIZE, |e, s| e as u32 / s);
@@ -141,12 +224,12 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
fn idx_unchecked(&self, pos: Vec3<i32>) -> Option<usize> {
let grp_idx = Self::grp_idx(pos);
let rel_idx = Self::rel_idx(pos);
let base = self.indices[grp_idx as usize];
let base = u32::from(self.indices[grp_idx as usize]);
let num_groups = self.vox.len() as u32 / Self::GROUP_VOLUME;
if base as u32 >= num_groups {
if base >= num_groups {
None
} else {
Some((base as u32 * Self::GROUP_VOLUME + rel_idx) as usize)
Some((base * Self::GROUP_VOLUME + rel_idx) as usize)
}
}
@@ -159,12 +242,12 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
let rel_idx = Self::rel_idx(pos);
let base = &mut self.indices[grp_idx as usize];
let num_groups = self.vox.len() as u32 / Self::GROUP_VOLUME;
if *base as u32 >= num_groups {
if u32::from(*base) >= num_groups {
*base = num_groups as u8;
self.vox
.extend(std::iter::repeat(self.default.clone()).take(Self::GROUP_VOLUME as usize));
}
(*base as u32 * Self::GROUP_VOLUME + rel_idx) as usize
(u32::from(*base) * Self::GROUP_VOLUME + rel_idx) as usize
}
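The index helpers above now widen the u8 group base with u32::from instead of an as cast; behaviour is unchanged, but u32::from only exists for lossless conversions, so it keeps compiling correctly if the index type ever changes, whereas as would silently truncate. A trivial sketch, not part of this commit:

// Sketch only: lossless widening made explicit.
fn widen_base(base: u8) -> u32 {
    u32::from(base) // same value as `base as u32`, but checked by the type system
}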
#[inline(always)]

View File

@@ -720,12 +720,15 @@ impl Server {
.set(self.state.ecs().read_resource::<TimeOfDay>().0);
if self.tick_metrics.is_100th_tick() {
let mut chonk_cnt = 0;
let mut group_cnt = 0;
let chunk_cnt = self.state.terrain().iter().fold(0, |a, (_, c)| {
chonk_cnt += 1;
group_cnt += c.sub_chunk_groups();
a + c.sub_chunks_len()
});
self.tick_metrics.chonks_count.set(chonk_cnt as i64);
self.tick_metrics.chunks_count.set(chunk_cnt as i64);
self.tick_metrics.chunk_groups_count.set(group_cnt as i64);
let entity_count = self.state.ecs().entities().join().count();
self.tick_metrics.entity_count.set(entity_count as i64);

View File

@@ -45,6 +45,7 @@ pub struct ChunkGenMetrics {
pub struct TickMetrics {
pub chonks_count: IntGauge,
pub chunks_count: IntGauge,
pub chunk_groups_count: IntGauge,
pub entity_count: IntGauge,
pub tick_time: IntGaugeVec,
pub build_info: IntGauge,
@@ -237,6 +238,10 @@ impl TickMetrics {
"chunks_count",
"number of all chunks currently active on the server",
))?;
let chunk_groups_count = IntGauge::with_opts(Opts::new(
"chunk_groups_count",
"number of 4×4×4 groups currently allocated by chunks on the server",
))?;
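For interpreting the new gauge, not part of this commit: each group covers 4×4×4 = 64 voxels, and assuming Block stays at 4 bytes (one BlockKind byte plus attr: [u8; 3], with no padding), a group costs roughly 256 bytes of voxel data, so the gauge approximates the server's voxel-storage footprint. A back-of-envelope sketch:

// Sketch only: rough bytes of voxel storage implied by chunk_groups_count,
// assuming Block remains 4 bytes per voxel.
const VOXELS_PER_GROUP: i64 = 4 * 4 * 4; // 64
const BYTES_PER_BLOCK: i64 = 4;

fn approx_voxel_bytes(chunk_groups_count: i64) -> i64 {
    chunk_groups_count * VOXELS_PER_GROUP * BYTES_PER_BLOCK
}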
let entity_count = IntGauge::with_opts(Opts::new(
"entity_count",
"number of all entities currently active on the server",
@@ -267,6 +272,7 @@ impl TickMetrics {
let chonks_count_clone = chonks_count.clone();
let chunks_count_clone = chunks_count.clone();
let chunk_groups_count_clone = chunk_groups_count.clone();
let entity_count_clone = entity_count.clone();
let build_info_clone = build_info.clone();
let start_time_clone = start_time.clone();
@@ -277,6 +283,7 @@ impl TickMetrics {
let f = |registry: &Registry| {
registry.register(Box::new(chonks_count_clone))?;
registry.register(Box::new(chunks_count_clone))?;
registry.register(Box::new(chunk_groups_count_clone))?;
registry.register(Box::new(entity_count_clone))?;
registry.register(Box::new(build_info_clone))?;
registry.register(Box::new(start_time_clone))?;
@@ -290,6 +297,7 @@ impl TickMetrics {
Self {
chonks_count,
chunks_count,
chunk_groups_count,
entity_count,
tick_time,
build_info,

View File

@@ -153,6 +153,7 @@ impl World {
let meta = TerrainChunkMeta::new(sim_chunk.get_name(&self.sim), sim_chunk.get_biome());
let mut chunk = TerrainChunk::new(base_z, stone, air, meta);
for y in 0..TerrainChunkSize::RECT_SIZE.y as i32 {
for x in 0..TerrainChunkSize::RECT_SIZE.x as i32 {
if should_continue() {
@@ -316,6 +317,9 @@ impl World {
)
});
// Finally, defragment to minimize space consumption.
chunk.defragment();
Ok((chunk, supplement))
}
}