Mirror of https://gitlab.com/veloren/veloren.git (synced 2024-08-30 18:12:32 +00:00)
Somewhat zoomier serde.
Commit 50070e738b (parent a3b7127de9)
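This commit speeds up (de)serialization of terrain data. A new BlockVec wrapper around Vec<Block> serializes as a single flat byte buffer via serialize_bytes, and deserialization re-validates every BlockKind tag in one cheap pass (a 256-bit set built from EnumIter) before reinterpreting the bytes as Blocks. To let TerrainChunk opt into this, Chunk and Chonk gain a storage type parameter (any DerefMut<Target = Vec<V>> wrapper around Vec<V>), with BlockVec serving as the storage for TerrainChunk.

A round-trip sketch of the resulting behavior (editorial illustration, not part of the commit; it assumes veloren-common's Block::air and SpriteKind::Empty, with bincode as used by the benchmark below):

    use veloren_common::terrain::{Block, BlockVec, SpriteKind};

    fn main() {
        // BlockVec serializes as one raw byte slice; deserializing re-validates
        // every BlockKind discriminant before reinterpreting the bytes as Blocks.
        let blocks = BlockVec::from(vec![Block::air(SpriteKind::Empty); 32 * 32]);
        let bytes = bincode::serialize(&blocks).unwrap();
        let back: BlockVec = bincode::deserialize(&bytes).unwrap();
        assert_eq!(blocks, back);
    }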
Cargo.lock: 1 change (generated)
@@ -6554,6 +6554,7 @@ dependencies = [
  "approx 0.4.0",
  "bitflags",
+ "bitvec",
  "bytemuck",
  "chrono",
  "chrono-tz",
  "clap 2.34.0",
@@ -40,6 +40,7 @@ strum = { version = "0.24", features = ["derive"] }
 approx = "0.4.0"
+bitvec = "0.22"
 # bumpalo = { version = "3.9.1", features = ["allocator_api"] }
 bytemuck = { version="1.4", features=["derive"] }
 clap = "2.33"
 crossbeam-utils = "0.8.1"
 bitflags = "1.2"
@@ -672,10 +672,10 @@ impl<const AVERAGE_PALETTE: bool> VoxelImageDecoding for TriPngEncoding<AVERAGE_
     }
 }
 
-pub fn image_terrain_chonk<S: RectVolSize, M: Clone, P: PackingFormula, VIE: VoxelImageEncoding>(
+pub fn image_terrain_chonk<S: RectVolSize, Storage: core::ops::DerefMut<Target=Vec<Block>>, M: Clone, P: PackingFormula, VIE: VoxelImageEncoding>(
     vie: &VIE,
     packing: P,
-    chonk: &Chonk<Block, S, M>,
+    chonk: &Chonk<Block, Storage, S, M>,
 ) -> Option<VIE::Output> {
     image_terrain(
         vie,
@@ -688,13 +688,14 @@ pub fn image_terrain_chonk<S: RectVolSize, M: Clone, P: PackingFormula, VIE: Vox
 
 pub fn image_terrain_volgrid<
     S: RectVolSize + Debug,
+    Storage: core::ops::DerefMut<Target=Vec<Block>> + Debug,
     M: Clone + Debug,
     P: PackingFormula,
     VIE: VoxelImageEncoding,
 >(
     vie: &VIE,
     packing: P,
-    volgrid: &VolGrid2d<Chonk<Block, S, M>>,
+    volgrid: &VolGrid2d<Chonk<Block, Storage, S, M>>,
 ) -> Option<VIE::Output> {
     let mut lo = Vec3::broadcast(i32::MAX);
     let mut hi = Vec3::broadcast(i32::MIN);
@@ -818,7 +819,7 @@ pub struct WireChonk<VIE: VoxelImageEncoding, P: PackingFormula, M: Clone, S: Re
 impl<VIE: VoxelImageEncoding + VoxelImageDecoding, P: PackingFormula, M: Clone, S: RectVolSize>
     WireChonk<VIE, P, M, S>
 {
-    pub fn from_chonk(vie: VIE, packing: P, chonk: &Chonk<Block, S, M>) -> Option<Self> {
+    pub fn from_chonk<Storage: core::ops::DerefMut<Target=Vec<Block>>>(vie: VIE, packing: P, chonk: &Chonk<Block, Storage, S, M>) -> Option<Self> {
         let data = image_terrain_chonk(&vie, packing, chonk)?;
         Some(Self {
             zmin: chonk.get_min_z(),
@@ -835,7 +836,7 @@ impl<VIE: VoxelImageEncoding + VoxelImageDecoding, P: PackingFormula, M: Clone,
         })
     }
 
-    pub fn to_chonk(&self) -> Option<Chonk<Block, S, M>> {
+    pub fn to_chonk<Storage: Clone + core::ops::DerefMut<Target=Vec<Block>> + From<Vec<Block>>>(&self) -> Option<Chonk<Block, Storage, S, M>> {
         let mut chonk = Chonk::new(self.zmin, self.below, self.above, self.meta.clone());
         write_image_terrain(
             &self.vie,
@@ -1,5 +1,5 @@
-use zerocopy::AsBytes;
 use super::SpriteKind;
+use bitvec::prelude::*;
 use crate::{
     comp::{fluid_dynamics::LiquidKind, tool::ToolKind},
     consts::FRIC_GROUND,
@@ -7,10 +7,11 @@ use crate::{
 };
 use num_derive::FromPrimitive;
 use num_traits::FromPrimitive;
-use serde::{Deserialize, Serialize};
+use serde::{ser, Deserialize, Serialize};
 use std::ops::Deref;
 use strum::{Display, EnumIter, EnumString};
 use vek::*;
+use zerocopy::AsBytes;
 
 make_case_elim!(
     block_kind,
@@ -30,6 +31,11 @@ make_case_elim!(
     Display,
 )]
 #[repr(u8)]
+/// XXX(@Sharp): If you feel like significantly modifying how BlockKind works, you *MUST* also
+/// update the implementation of BlockVec! BlockVec uses unsafe code that relies on EnumIter.
+/// If you are just adding variants, that's fine (for now), but any other changes (like
+/// changing from repr(u8)) need review.
+///
 /// NOTE: repr(u8) preserves the niche optimization for fieldless enums!
 pub enum BlockKind {
     Air = 0x00, // Air counts as a fluid
@@ -113,8 +119,12 @@ impl BlockKind {
     }
 }
 
+/// XXX(@Sharp): If you feel like significantly modifying how Block works, you *MUST* also update
+/// the implementation of BlockVec! BlockVec uses unsafe code that depends on being able to
+/// independently validate the kind and treat attr as bytes; changing things so that this no longer
+/// works will require careful review.
 #[derive(AsBytes, Copy, Clone, Debug, Eq, Serialize, Deserialize)]
-/// NOTE: repr(C) appears to preservre niche optimizations!
+/// NOTE: repr(C) appears to preserve niche optimizations!
 #[repr(align(4), C)]
 pub struct Block {
     kind: BlockKind,
@@ -431,6 +441,125 @@ impl Block {
     }
 }
 
+/// A wrapper around Vec<Block>, usable for efficient deserialization.
+///
+/// XXX(@Sharp): This is crucially interwoven with the definition of Block and BlockKind, as it
+/// uses unsafe code to speed up deserialization. If you decide to change how these types work in
+/// a significant way (i.e. beyond adding new variants to BlockKind), this needs careful review!
+#[derive(
+    Clone,
+    Debug,
+    Deserialize,
+    Hash,
+    Eq,
+    PartialEq,
+)]
+#[serde(try_from = "&'_ [u8]")]
+pub struct BlockVec(Vec<Block>);
+
+impl core::ops::Deref for BlockVec {
+    type Target = Vec<Block>;
+
+    #[inline]
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl core::ops::DerefMut for BlockVec {
+    #[inline]
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+impl From<Vec<Block>> for BlockVec {
+    #[inline]
+    fn from(inner: Vec<Block>) -> Self {
+        Self(inner)
+    }
+}
+
+impl Serialize for BlockVec {
+    /// We can *safely* serialize a BlockVec as a Vec of bytes (this is validated by AsBytes).
+    /// This also means that the representation here is architecture independent.
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        serializer.serialize_bytes(self.0.as_bytes())
+    }
+}
+
+impl<'a/*, Error: de::Error*/> TryFrom<&'a [u8]> for BlockVec {
+    type Error = &'static str;
+
+    /// XXX(@Sharp): This implementation is subtle and its safety depends on correct implementation!
+    /// It is well-commented, but those comments are only valid so long as this implementation
+    /// doesn't change. If you do need to change this implementation, please seek careful review!
+    ///
+    /// NOTE: Ideally, we would perform a try_from(Vec<u8>) instead, to avoid the extra copy.
+    /// Unfortunately this is not generally sound, since Vec allocations must be deallocated with
+    /// the same layout with which they were allocated, which includes alignment (and no, it does
+    /// not matter if they in practice have the same alignment at runtime, it's still UB). If we
+    /// were to do this, we'd effectively have to hold a Vec<u8> inside BlockVec at all times, not
+    /// exposing &mut access at all, and instead requiring transmutes to get access to Blocks.
+    /// This seems like a huge pain so for now, hopefully deserialize (the non-owned version) is
+    /// sufficient.
+    #[allow(unsafe_code)]
+    fn try_from(blocks: &'a [u8]) -> Result<Self, Self::Error>
+    {
+        // First, make sure we're correctly interpretable as a [u8; 4].
+        let blocks: &[[u8; 4]] = bytemuck::try_cast_slice(blocks)
+            .map_err(|_| /*Error::invalid_length(blocks.len(), &"a multiple of 4")*/"Length must be a multiple of 4")?;
+        // The basic observation here is that a slice of [u8; 4] is *almost* the same as a slice of
+        // blocks, so conversion from the former to the latter can be very cheap. The only problem
+        // is that BlockKind (the first byte in `Block`) has some invalid states, so not every u8
+        // slice of the appropriate size is a block slice. Fortunately, since we don't care about
+        // figuring out which block triggered the error, we can figure this out really cheaply--we
+        // just have to set a bit for every block we see, then check at the end to make sure all
+        // the bits we set are valid elements. We can construct the valid bit set using EnumIter,
+        // and the requirement is: (!valid & set_bits) = 0.
+
+        // Construct the invalid list. Initially, it's all 1s, then we set all the bits
+        // corresponding to valid block kinds to 0, leaving a set bit for each invalid block kind.
+        //
+        // TODO: Verify whether this gets constant folded away; if not, try to do this as a const
+        // fn? Might need to modify the EnumIter implementation.
+        let mut invalid_bits = bitarr![1; 256];
+        <BlockKind as strum::IntoEnumIterator>::iter().for_each(|bk| {
+            invalid_bits.set((bk as u8).into(), false);
+        });
+
+        // Initially, the set bit list is empty.
+        let mut set_bits = bitarr![0; 256];
+
+        // TODO: SIMD iteration.
+        // NOTE: The block kind is guaranteed to be at the front, thanks to the repr(C).
+        blocks.into_iter().for_each(|&[kind, _, _, _]| {
+            // TODO: Check assembly to see if the bounds check gets elided; if so, leave this as
+            // set instead of set_unchecked, to scope down the use of unsafe as much as possible.
+            set_bits.set(kind.into(), true);
+        });
+
+        // The invalid bits and the set bits should have no overlap.
+        set_bits &= invalid_bits;
+        if set_bits.any() {
+            // At least one invalid bit was set, so there was an invalid BlockKind somewhere.
+            //
+            // TODO: Use radix representation of the bad block kind.
+            return Err(/*Error::unknown_variant("an invalid u8", &["see the definition of BlockKind for details"])*/"Found an unknown BlockKind while parsing Vec<Block>");
+        }
+        // All set bits are cleared, so all block kinds were valid. Combined with the slice being
+        // compatible with [u8; 4], we can transmute the slice to a slice of Blocks and then
+        // construct a new vector from it.
+        let blocks = unsafe { core::mem::transmute::<&'a [[u8; 4]], &'a [Block]>(blocks) };
+        // Finally, *safely* construct a vector from the new blocks (as mentioned above, we cannot
+        // reuse the old byte vector even if we wanted to, since it doesn't have the same
+        // alignment as Block).
+        Ok(Self(blocks.to_vec()))
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
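The tag-validation trick above, restated as a self-contained sketch (illustrative names only, not code from this commit): rather than branching per element, mark which byte values occur, then check the 256 possible values once at the end.

    /// Returns true iff every byte in `bytes` is a valid tag according to `is_valid`.
    /// Per-element work is a single store; validity is checked once per possible
    /// value, mirroring the (!valid & set_bits) == 0 requirement used by BlockVec.
    fn all_tags_valid(bytes: &[u8], is_valid: impl Fn(u8) -> bool) -> bool {
        let mut seen = [false; 256];
        for &b in bytes {
            seen[b as usize] = true;
        }
        (0..=255u8).all(|v| !seen[v as usize] || is_valid(v))
    }

    fn main() {
        // Example: accept only tags 0x00..=0x04, like a small fieldless enum.
        assert!(all_tags_valid(&[0, 1, 1, 4], |b| b <= 4));
        assert!(!all_tags_valid(&[0, 7], |b| b <= 4));
    }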
@@ -16,13 +16,42 @@ pub enum ChonkError {
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct SubChunkSize<ChonkSize: RectVolSize> {
-    phantom: PhantomData<ChonkSize>,
+pub struct SubChunkSize<V, Storage, ChonkSize: RectVolSize> {
+    storage: Storage,
+    phantom: PhantomData<(V, ChonkSize)>,
+}
+
+impl<V, Storage: core::ops::Deref<Target=Vec<V>>, ChonkSize: RectVolSize> core::ops::Deref for SubChunkSize<V, Storage, ChonkSize> {
+    type Target = Vec<V>;
+
+    #[inline]
+    fn deref(&self) -> &Self::Target {
+        &self.storage
+    }
+}
+
+impl<V, Storage: core::ops::DerefMut<Target=Vec<V>>, ChonkSize: RectVolSize> core::ops::DerefMut for SubChunkSize<V, Storage, ChonkSize> {
+    #[inline]
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.storage
+    }
+}
+
+impl<V, Storage: From<Vec<V>>, ChonkSize: RectVolSize> From<Vec<V>> for SubChunkSize<V, Storage, ChonkSize> {
+    #[inline]
+    fn from(storage: Vec<V>) -> Self {
+        Self {
+            storage: storage.into(),
+            phantom: PhantomData,
+        }
+    }
 }
 
 // TODO (haslersn): Assert ChonkSize::RECT_SIZE.x == ChonkSize::RECT_SIZE.y
 
-impl<ChonkSize: RectVolSize> VolSize for SubChunkSize<ChonkSize> {
+impl<V, Storage, ChonkSize: RectVolSize> VolSize<V> for SubChunkSize<V, Storage, ChonkSize>
+/* where Storage: Clone + core::ops::Deref<Target=Vec<V>> + core::ops::DerefMut + From<Vec<V>>,
+ * */
+{
     const SIZE: Vec3<u32> = Vec3 {
         x: ChonkSize::RECT_SIZE.x,
         y: ChonkSize::RECT_SIZE.x,
@@ -31,19 +60,19 @@ impl<ChonkSize: RectVolSize> VolSize for SubChunkSize<ChonkSize> {
     };
 }
 
-type SubChunk<V, S, M> = Chunk<V, SubChunkSize<S>, M>;
+type SubChunk<V, Storage, S, M> = Chunk<V, SubChunkSize<V, Storage, S>, M>;
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct Chonk<V, S: RectVolSize, M: Clone> {
+pub struct Chonk<V, Storage, S: RectVolSize, M: Clone> {
     z_offset: i32,
-    sub_chunks: Vec<SubChunk<V, S, M>>,
+    sub_chunks: Vec<SubChunk<V, Storage, S, M>>,
     below: V,
     above: V,
     meta: M,
     phantom: PhantomData<S>,
 }
 
-impl<V, S: RectVolSize, M: Clone> Chonk<V, S, M> {
+impl<V, Storage: core::ops::DerefMut<Target=Vec<V>>, S: RectVolSize, M: Clone> Chonk<V, Storage, S, M> {
     pub fn new(z_offset: i32, below: V, above: V, meta: M) -> Self {
         Self {
             z_offset,
@@ -62,7 +91,7 @@ impl<V, S: RectVolSize, M: Clone> Chonk<V, S, M> {
 
     #[inline]
     pub fn get_max_z(&self) -> i32 {
-        self.z_offset + (self.sub_chunks.len() as u32 * SubChunkSize::<S>::SIZE.z) as i32
+        self.z_offset + (self.sub_chunks.len() as u32 * SubChunkSize::<V, Storage, S>::SIZE.z) as i32
     }
 
     pub fn sub_chunks_len(&self) -> usize { self.sub_chunks.len() }
@@ -80,8 +109,8 @@ impl<V, S: RectVolSize, M: Clone> Chonk<V, S, M> {
             .enumerate()
             .filter(|(_, sc)| sc.num_groups() > 0)
             .flat_map(move |(i, sc)| {
-                let z_offset = self.z_offset + i as i32 * SubChunkSize::<S>::SIZE.z as i32;
-                sc.vol_iter(Vec3::zero(), SubChunkSize::<S>::SIZE.map(|e| e as i32))
+                let z_offset = self.z_offset + i as i32 * SubChunkSize::<V, Storage, S>::SIZE.z as i32;
+                sc.vol_iter(Vec3::zero(), SubChunkSize::<V, Storage, S>::SIZE.map(|e| e as i32))
                     .map(move |(pos, vox)| (pos + Vec3::unit_z() * z_offset, vox))
             })
     }
@@ -91,13 +120,13 @@ impl<V, S: RectVolSize, M: Clone> Chonk<V, S, M> {
     #[inline]
     fn sub_chunk_idx(&self, z: i32) -> i32 {
         let diff = z - self.z_offset;
-        diff >> (SubChunkSize::<S>::SIZE.z - 1).count_ones()
+        diff >> (SubChunkSize::<V, Storage, S>::SIZE.z - 1).count_ones()
     }
 
     // Converts a z coordinate into a local z coordinate within a sub chunk
     fn sub_chunk_z(&self, z: i32) -> i32 {
         let diff = z - self.z_offset;
-        diff & (SubChunkSize::<S>::SIZE.z - 1) as i32
+        diff & (SubChunkSize::<V, Storage, S>::SIZE.z - 1) as i32
     }
 
     // Returns the z offset of the sub_chunk that contains layer z
@@ -106,6 +135,7 @@ impl<V, S: RectVolSize, M: Clone> Chonk<V, S, M> {
     /// Compress chunk by using more intelligent defaults.
     pub fn defragment(&mut self)
     where
+        Storage: From<Vec<V>>,
         V: zerocopy::AsBytes + Clone + Eq + Hash,
         [(); { core::mem::size_of::<V>() }]:,
     {
@@ -149,20 +179,20 @@ impl<V, S: RectVolSize, M: Clone> Chonk<V, S, M> {
         // Finally, bump the z_offset to account for the removed subchunks at the
         // bottom. TODO: Add invariants to justify why `below_len` must fit in
         // i32.
-        self.z_offset += below_len as i32 * SubChunkSize::<S>::SIZE.z as i32;
+        self.z_offset += below_len as i32 * SubChunkSize::<V, Storage, S>::SIZE.z as i32;
     }
 }
 
-impl<V, S: RectVolSize, M: Clone> BaseVol for Chonk<V, S, M> {
+impl<V, Storage, S: RectVolSize, M: Clone> BaseVol for Chonk<V, Storage, S, M> {
     type Error = ChonkError;
     type Vox = V;
 }
 
-impl<V, S: RectVolSize, M: Clone> RectRasterableVol for Chonk<V, S, M> {
+impl<V, Storage, S: RectVolSize, M: Clone> RectRasterableVol for Chonk<V, Storage, S, M> {
     const RECT_SIZE: Vec2<u32> = S::RECT_SIZE;
 }
 
-impl<V, S: RectVolSize, M: Clone> ReadVol for Chonk<V, S, M> {
+impl<V, Storage: core::ops::DerefMut<Target=Vec<V>>, S: RectVolSize, M: Clone> ReadVol for Chonk<V, Storage, S, M> {
     #[inline(always)]
     fn get(&self, pos: Vec3<i32>) -> Result<&V, Self::Error> {
         if pos.z < self.get_min_z() {
@@ -176,7 +206,7 @@ impl<V, S: RectVolSize, M: Clone> ReadVol for Chonk<V, S, M> {
             let sub_chunk_idx = self.sub_chunk_idx(pos.z);
             let rpos = pos
                 - Vec3::unit_z()
-                    * (self.z_offset + sub_chunk_idx * SubChunkSize::<S>::SIZE.z as i32);
+                    * (self.z_offset + sub_chunk_idx * SubChunkSize::<V, Storage, S>::SIZE.z as i32);
             self.sub_chunks[sub_chunk_idx as usize]
                 .get(rpos)
                 .map_err(Self::Error::SubChunkError)
@@ -184,7 +214,7 @@ impl<V, S: RectVolSize, M: Clone> ReadVol for Chonk<V, S, M> {
     }
 }
 
-impl<V: Clone + PartialEq, S: RectVolSize, M: Clone> WriteVol for Chonk<V, S, M> {
+impl<V: Clone + PartialEq, Storage: Clone + core::ops::DerefMut<Target=Vec<V>> + From<Vec<V>>, S: Clone + RectVolSize, M: Clone> WriteVol for Chonk<V, Storage, S, M> {
     #[inline(always)]
     fn set(&mut self, pos: Vec3<i32>, block: Self::Vox) -> Result<V, Self::Error> {
         let mut sub_chunk_idx = self.sub_chunk_idx(pos.z);
@@ -195,10 +225,10 @@ impl<V: Clone + PartialEq, S: RectVolSize, M: Clone> WriteVol for Chonk<V, S, M>
                 return Ok(self.below.clone());
             }
             // Prepend exactly sufficiently many SubChunks via Vec::splice
-            let c = Chunk::<V, SubChunkSize<S>, M>::filled(self.below.clone(), self.meta.clone());
+            let c = Chunk::<V, SubChunkSize<V, Storage, S>, M>::filled(self.below.clone(), self.meta.clone());
             let n = (-sub_chunk_idx) as usize;
             self.sub_chunks.splice(0..0, std::iter::repeat(c).take(n));
-            self.z_offset += sub_chunk_idx * SubChunkSize::<S>::SIZE.z as i32;
+            self.z_offset += sub_chunk_idx * SubChunkSize::<V, Storage, S>::SIZE.z as i32;
             sub_chunk_idx = 0;
         } else if pos.z >= self.get_max_z() {
             // Make sure we're not adding a redundant chunk.
@@ -206,27 +236,27 @@ impl<V: Clone + PartialEq, S: RectVolSize, M: Clone> WriteVol for Chonk<V, S, M>
                 return Ok(self.above.clone());
             }
             // Append exactly sufficiently many SubChunks via Vec::extend
-            let c = Chunk::<V, SubChunkSize<S>, M>::filled(self.above.clone(), self.meta.clone());
+            let c = Chunk::<V, SubChunkSize<V, Storage, S>, M>::filled(self.above.clone(), self.meta.clone());
             let n = 1 + sub_chunk_idx as usize - self.sub_chunks.len();
             self.sub_chunks.extend(std::iter::repeat(c).take(n));
         }
 
         let rpos = pos
-            - Vec3::unit_z() * (self.z_offset + sub_chunk_idx * SubChunkSize::<S>::SIZE.z as i32);
+            - Vec3::unit_z() * (self.z_offset + sub_chunk_idx * SubChunkSize::<V, Storage, S>::SIZE.z as i32);
         self.sub_chunks[sub_chunk_idx as usize] // TODO (haslersn): self.sub_chunks.get(...).and_then(...)
             .set(rpos, block)
             .map_err(Self::Error::SubChunkError)
     }
 }
 
-struct ChonkIterHelper<V, S: RectVolSize, M: Clone> {
+struct ChonkIterHelper<V, Storage, S: RectVolSize, M: Clone> {
     sub_chunk_min_z: i32,
     lower_bound: Vec3<i32>,
     upper_bound: Vec3<i32>,
-    phantom: PhantomData<Chonk<V, S, M>>,
+    phantom: PhantomData<Chonk<V, Storage, S, M>>,
 }
 
-impl<V, S: RectVolSize, M: Clone> Iterator for ChonkIterHelper<V, S, M> {
+impl<V, Storage, S: RectVolSize, M: Clone> Iterator for ChonkIterHelper<V, Storage, S, M> {
     type Item = (i32, Vec3<i32>, Vec3<i32>);
 
     #[inline(always)]
@@ -239,19 +269,19 @@ impl<V, S: RectVolSize, M: Clone> Iterator for ChonkIterHelper<V, S, M> {
         let current_min_z = self.sub_chunk_min_z;
         lb.z -= current_min_z;
         ub.z -= current_min_z;
-        ub.z = std::cmp::min(ub.z, SubChunkSize::<S>::SIZE.z as i32);
-        self.sub_chunk_min_z += SubChunkSize::<S>::SIZE.z as i32;
+        ub.z = std::cmp::min(ub.z, SubChunkSize::<V, Storage, S>::SIZE.z as i32);
+        self.sub_chunk_min_z += SubChunkSize::<V, Storage, S>::SIZE.z as i32;
         self.lower_bound.z = self.sub_chunk_min_z;
         Some((current_min_z, lb, ub))
     }
 }
 
-pub struct ChonkPosIter<V, S: RectVolSize, M: Clone> {
-    outer: ChonkIterHelper<V, S, M>,
-    opt_inner: Option<(i32, ChunkPosIter<V, SubChunkSize<S>, M>)>,
+pub struct ChonkPosIter<V, Storage, S: RectVolSize, M: Clone> {
+    outer: ChonkIterHelper<V, Storage, S, M>,
+    opt_inner: Option<(i32, ChunkPosIter<V, SubChunkSize<V, Storage, S>, M>)>,
 }
 
-impl<V, S: RectVolSize, M: Clone> Iterator for ChonkPosIter<V, S, M> {
+impl<V, Storage, S: RectVolSize, M: Clone> Iterator for ChonkPosIter<V, Storage, S, M> {
     type Item = Vec3<i32>;
 
     #[inline(always)]
@@ -266,25 +296,25 @@ impl<V, S: RectVolSize, M: Clone> Iterator for ChonkPosIter<V, S, M> {
             match self.outer.next() {
                 None => return None,
                 Some((sub_chunk_min_z, lb, ub)) => {
-                    self.opt_inner = Some((sub_chunk_min_z, SubChunk::<V, S, M>::pos_iter(lb, ub)))
+                    self.opt_inner = Some((sub_chunk_min_z, SubChunk::<V, Storage, S, M>::pos_iter(lb, ub)))
                 },
             }
         }
     }
 }
 
-enum InnerChonkVolIter<'a, V, S: RectVolSize, M: Clone> {
-    Vol(ChunkVolIter<'a, V, SubChunkSize<S>, M>),
-    Pos(ChunkPosIter<V, SubChunkSize<S>, M>),
+enum InnerChonkVolIter<'a, V, Storage, S: RectVolSize, M: Clone> {
+    Vol(ChunkVolIter<'a, V, SubChunkSize<V, Storage, S>, M>),
+    Pos(ChunkPosIter<V, SubChunkSize<V, Storage, S>, M>),
 }
 
-pub struct ChonkVolIter<'a, V, S: RectVolSize, M: Clone> {
-    chonk: &'a Chonk<V, S, M>,
-    outer: ChonkIterHelper<V, S, M>,
-    opt_inner: Option<(i32, InnerChonkVolIter<'a, V, S, M>)>,
+pub struct ChonkVolIter<'a, V, Storage, S: RectVolSize, M: Clone> {
+    chonk: &'a Chonk<V, Storage, S, M>,
+    outer: ChonkIterHelper<V, Storage, S, M>,
+    opt_inner: Option<(i32, InnerChonkVolIter<'a, V, Storage, S, M>)>,
 }
 
-impl<'a, V, S: RectVolSize, M: Clone> Iterator for ChonkVolIter<'a, V, S, M> {
+impl<'a, V, Storage: core::ops::DerefMut<Target=Vec<V>>, S: RectVolSize, M: Clone> Iterator for ChonkVolIter<'a, V, Storage, S, M> {
     type Item = (Vec3<i32>, &'a V);
 
     #[inline(always)]
@@ -292,8 +322,8 @@ impl<'a, V, S: RectVolSize, M: Clone> Iterator for ChonkVolIter<'a, V, S, M> {
         loop {
             if let Some((sub_chunk_min_z, ref mut inner)) = self.opt_inner {
                 let got = match inner {
-                    InnerChonkVolIter::<'a, V, S, M>::Vol(iter) => iter.next(),
-                    InnerChonkVolIter::<'a, V, S, M>::Pos(iter) => iter.next().map(|pos| {
+                    InnerChonkVolIter::<'a, V, Storage, S, M>::Vol(iter) => iter.next(),
+                    InnerChonkVolIter::<'a, V, Storage, S, M>::Pos(iter) => iter.next().map(|pos| {
                         if sub_chunk_min_z < self.chonk.get_min_z() {
                             (pos, &self.chonk.below)
                         } else {
@@ -312,9 +342,9 @@ impl<'a, V, S: RectVolSize, M: Clone> Iterator for ChonkVolIter<'a, V, S, M> {
                 let inner = if sub_chunk_min_z < self.chonk.get_min_z()
                     || sub_chunk_min_z >= self.chonk.get_max_z()
                 {
-                    InnerChonkVolIter::<'a, V, S, M>::Pos(SubChunk::<V, S, M>::pos_iter(lb, ub))
+                    InnerChonkVolIter::<'a, V, Storage, S, M>::Pos(SubChunk::<V, Storage, S, M>::pos_iter(lb, ub))
                 } else {
-                    InnerChonkVolIter::<'a, V, S, M>::Vol(
+                    InnerChonkVolIter::<'a, V, Storage, S, M>::Vol(
                         self.chonk.sub_chunks
                             [self.chonk.sub_chunk_idx(sub_chunk_min_z) as usize]
                             .vol_iter(lb, ub),
@@ -327,12 +357,12 @@ impl<'a, V, S: RectVolSize, M: Clone> Iterator for ChonkVolIter<'a, V, S, M> {
     }
 }
 
-impl<'a, V, S: RectVolSize, M: Clone> IntoPosIterator for &'a Chonk<V, S, M> {
-    type IntoIter = ChonkPosIter<V, S, M>;
+impl<'a, V, Storage: core::ops::DerefMut<Target=Vec<V>>, S: RectVolSize, M: Clone> IntoPosIterator for &'a Chonk<V, Storage, S, M> {
+    type IntoIter = ChonkPosIter<V, Storage, S, M>;
 
     fn pos_iter(self, lower_bound: Vec3<i32>, upper_bound: Vec3<i32>) -> Self::IntoIter {
         Self::IntoIter {
-            outer: ChonkIterHelper::<V, S, M> {
+            outer: ChonkIterHelper::<V, Storage, S, M> {
                 sub_chunk_min_z: self.sub_chunk_min_z(lower_bound.z),
                 lower_bound,
                 upper_bound,
@@ -343,13 +373,13 @@ impl<'a, V, S: RectVolSize, M: Clone> IntoPosIterator for &'a Chonk<V, S, M> {
     }
 }
 
-impl<'a, V, S: RectVolSize, M: Clone> IntoVolIterator<'a> for &'a Chonk<V, S, M> {
-    type IntoIter = ChonkVolIter<'a, V, S, M>;
+impl<'a, V, Storage: core::ops::DerefMut<Target=Vec<V>>, S: RectVolSize, M: Clone> IntoVolIterator<'a> for &'a Chonk<V, Storage, S, M> {
+    type IntoIter = ChonkVolIter<'a, V, Storage, S, M>;
 
     fn vol_iter(self, lower_bound: Vec3<i32>, upper_bound: Vec3<i32>) -> Self::IntoIter {
         Self::IntoIter {
            chonk: self,
-           outer: ChonkIterHelper::<V, S, M> {
+           outer: ChonkIterHelper::<V, Storage, S, M> {
                sub_chunk_min_z: self.sub_chunk_min_z(lower_bound.z),
                lower_bound,
                upper_bound,
@@ -9,7 +9,7 @@ pub mod structure;
 // Reexports
 pub use self::{
     biome::BiomeKind,
-    block::{Block, BlockKind},
+    block::{Block, BlockKind, BlockVec},
     map::MapSizeLg,
     site::SitesKind,
     sprite::SpriteKind,
@@ -157,7 +157,7 @@ impl TerrainChunkMeta {
 
 // Terrain type aliases
 
-pub type TerrainChunk = chonk::Chonk<Block, TerrainChunkSize, TerrainChunkMeta>;
+pub type TerrainChunk = chonk::Chonk<Block, BlockVec, TerrainChunkSize, TerrainChunkMeta>;
 pub type TerrainGrid = VolGrid2d<TerrainChunk>;
 
 impl TerrainGrid {
@@ -4,7 +4,12 @@ use vek::*;
 
 /// Used to specify a volume's compile-time size. This exists as a substitute
 /// until const generics are implemented.
-pub trait VolSize: Clone {
+///
+/// The actual type should be suitable for use as storage for a vector of Vs. The type signature
+/// essentially requires that this "just" be a wrapper around Vec<V>, but in some cases we may be
+/// able to implement serialization / deserialization, and potentially other operations, more
+/// efficiently with such a wrapper than we would by using Vec<Vox>.
+pub trait VolSize<V>/*: Clone + core::ops::Deref<Target=Vec<V>> + core::ops::DerefMut + From<Vec<V>>*/ {
     const SIZE: Vec3<u32>;
 }
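For reference, here is a minimal storage wrapper meeting the bounds sketched in the commented-out supertraits above (a hedged sketch with illustrative names, not code from this commit):

    // A newtype that "just" wraps Vec<V>: Chunk can use it as its voxel buffer
    // through Deref/DerefMut, while the wrapper remains free to customize serde
    // (as BlockVec does for Vec<Block>).
    #[derive(Clone)]
    struct PlainStorage<V>(Vec<V>);

    impl<V> core::ops::Deref for PlainStorage<V> {
        type Target = Vec<V>;

        fn deref(&self) -> &Self::Target { &self.0 }
    }

    impl<V> core::ops::DerefMut for PlainStorage<V> {
        fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 }
    }

    impl<V> From<Vec<V>> for PlainStorage<V> {
        fn from(inner: Vec<V>) -> Self { Self(inner) }
    }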
@@ -49,16 +49,15 @@ pub enum ChunkError {
 /// index buffer can consist of `u8`s. This keeps the space requirement for the
 /// index buffer as low as 4 cache lines.
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct Chunk<V, S: VolSize, M> {
+pub struct Chunk<V, S: VolSize<V>, M> {
     indices: Vec<u8>, /* TODO (haslersn): Box<[u8; S::SIZE.x * S::SIZE.y * S::SIZE.z]>, this is
                        * however not possible in Rust yet */
-    vox: Vec<V>,
+    vox: S,
     default: V,
     meta: M,
-    phantom: PhantomData<S>,
 }
 
-impl<V, S: VolSize, M> Chunk<V, S, M> {
+impl<V, S: VolSize<V>, M> Chunk<V, S, M> {
     pub const GROUP_COUNT: Vec3<u32> = Vec3::new(
         S::SIZE.x / Self::GROUP_SIZE.x,
         S::SIZE.y / Self::GROUP_SIZE.y,
@@ -74,10 +73,15 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
     );
     const GROUP_VOLUME: u32 = [Self::VOLUME / 256, 1][(Self::VOLUME < 256) as usize];
     const VOLUME: u32 = (S::SIZE.x * S::SIZE.y * S::SIZE.z) as u32;
+}
+
+impl<V, S: core::ops::DerefMut<Target=Vec<V>> + VolSize<V>, M> Chunk<V, S, M> {
     /// Creates a new `Chunk` with the provided dimensions and all voxels filled
     /// with duplicates of the provided voxel.
-    pub fn filled(default: V, meta: M) -> Self {
+    pub fn filled(default: V, meta: M) -> Self
+    where
+        S: From<Vec<V>>,
+    {
         // TODO (haslersn): Alter into compile time assertions
         //
         // An extent is valid if it fulfils the following conditions.
@@ -111,10 +115,9 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
 
         Self {
             indices: vec![255; Self::GROUP_COUNT_TOTAL as usize],
-            vox: Vec::new(),
+            vox: Vec::new().into(),
             default,
             meta,
-            phantom: PhantomData,
         }
     }
 
@@ -122,6 +125,7 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
     pub fn defragment(&mut self)
     where
         V: zerocopy::AsBytes + Clone + Eq + Hash,
+        S: From<Vec<V>>,
         [(); { core::mem::size_of::<V>() }]:,
     {
         // First, construct a HashMap with max capacity equal to GROUP_COUNT (since each
@@ -179,7 +183,7 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
         let mut new_vox =
             Vec::with_capacity(Self::GROUP_COUNT_TOTAL as usize - default_groups.len());
         let num_groups = self.num_groups();
-        let mut indices = &mut self.indices[..Self::GROUP_COUNT_TOTAL as usize];
+        let indices = &mut self.indices[..Self::GROUP_COUNT_TOTAL as usize];
         indices
             .iter_mut()
             .enumerate()
|
||||
});
|
||||
|
||||
// Finally, reset our vox and default values to the new ones.
|
||||
self.vox = new_vox;
|
||||
self.vox = new_vox.into();
|
||||
self.default = new_default;
|
||||
}
|
||||
|
||||
@@ -250,7 +254,8 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
     }
 
     #[inline(always)]
-    fn idx_unchecked(&self, pos: Vec3<i32>) -> Option<usize> {
+    fn idx_unchecked(&self, pos: Vec3<i32>) -> Option<usize>
+    {
         let grp_idx = Self::grp_idx(pos);
         let rel_idx = Self::rel_idx(pos);
         let base = u32::from(self.indices[grp_idx as usize]);
@@ -290,6 +295,7 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
     #[inline(always)]
     fn set_unchecked(&mut self, pos: Vec3<i32>, vox: V) -> V
     where
+        S: core::ops::DerefMut<Target=Vec<V>>,
        V: Clone + PartialEq,
     {
         if vox != self.default {
@@ -303,16 +309,16 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
     }
 }
 
-impl<V, S: VolSize, M> BaseVol for Chunk<V, S, M> {
+impl<V, S: VolSize<V>, M> BaseVol for Chunk<V, S, M> {
     type Error = ChunkError;
     type Vox = V;
 }
 
-impl<V, S: VolSize, M> RasterableVol for Chunk<V, S, M> {
+impl<V, S: VolSize<V>, M> RasterableVol for Chunk<V, S, M> {
     const SIZE: Vec3<u32> = S::SIZE;
 }
 
-impl<V, S: VolSize, M> ReadVol for Chunk<V, S, M> {
+impl<V, S: core::ops::DerefMut<Target=Vec<V>> + VolSize<V>, M> ReadVol for Chunk<V, S, M> {
     #[inline(always)]
     fn get(&self, pos: Vec3<i32>) -> Result<&Self::Vox, Self::Error> {
         if !pos
@@ -326,7 +332,10 @@ impl<V, S: VolSize, M> ReadVol for Chunk<V, S, M> {
     }
 }
 
-impl<V: Clone + PartialEq, S: VolSize, M> WriteVol for Chunk<V, S, M> {
+impl<V: Clone + PartialEq, S: VolSize<V>, M> WriteVol for Chunk<V, S, M>
+where
+    S: core::ops::DerefMut<Target=Vec<V>>,
+{
     #[inline(always)]
     fn set(&mut self, pos: Vec3<i32>, vox: Self::Vox) -> Result<Self::Vox, Self::Error> {
         if !pos
@@ -340,7 +349,7 @@ impl<V: Clone + PartialEq, S: VolSize, M> WriteVol for Chunk<V, S, M> {
     }
 }
 
-pub struct ChunkPosIter<V, S: VolSize, M> {
+pub struct ChunkPosIter<V, S: VolSize<V>, M> {
     // Store as `u8`s so as to reduce memory footprint.
     lb: Vec3<i32>,
     ub: Vec3<i32>,
@@ -348,7 +357,7 @@ pub struct ChunkPosIter<V, S: VolSize, M> {
     phantom: PhantomData<Chunk<V, S, M>>,
 }
 
-impl<V, S: VolSize, M> ChunkPosIter<V, S, M> {
+impl<V, S: VolSize<V>, M> ChunkPosIter<V, S, M> {
     fn new(lower_bound: Vec3<i32>, upper_bound: Vec3<i32>) -> Self {
         // If the range is empty, then we have the special case `ub = lower_bound`.
         let ub = if lower_bound.map2(upper_bound, |l, u| l < u).reduce_and() {
@@ -365,7 +374,7 @@ impl<V, S: VolSize, M> ChunkPosIter<V, S, M> {
     }
 }
 
-impl<V, S: VolSize, M> Iterator for ChunkPosIter<V, S, M> {
+impl<V, S: VolSize<V>, M> Iterator for ChunkPosIter<V, S, M> {
     type Item = Vec3<i32>;
 
     #[inline(always)]
@@ -420,12 +429,12 @@ impl<V, S: VolSize, M> Iterator for ChunkPosIter<V, S, M> {
     }
 }
 
-pub struct ChunkVolIter<'a, V, S: VolSize, M> {
+pub struct ChunkVolIter<'a, V, S: VolSize<V>, M> {
     chunk: &'a Chunk<V, S, M>,
     iter_impl: ChunkPosIter<V, S, M>,
 }
 
-impl<'a, V, S: VolSize, M> Iterator for ChunkVolIter<'a, V, S, M> {
+impl<'a, V, S: core::ops::DerefMut<Target=Vec<V>> + VolSize<V>, M> Iterator for ChunkVolIter<'a, V, S, M> {
     type Item = (Vec3<i32>, &'a V);
 
     #[inline(always)]
@@ -436,7 +445,7 @@ impl<'a, V, S: VolSize, M> Iterator for ChunkVolIter<'a, V, S, M> {
     }
 }
 
-impl<V, S: VolSize, M> Chunk<V, S, M> {
+impl<V, S: VolSize<V>, M> Chunk<V, S, M> {
     /// It's possible to obtain a positional iterator without having a `Chunk`
     /// instance.
     pub fn pos_iter(lower_bound: Vec3<i32>, upper_bound: Vec3<i32>) -> ChunkPosIter<V, S, M> {
@@ -444,7 +453,7 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
     }
 }
 
-impl<'a, V, S: VolSize, M> IntoPosIterator for &'a Chunk<V, S, M> {
+impl<'a, V, S: VolSize<V>, M> IntoPosIterator for &'a Chunk<V, S, M> {
     type IntoIter = ChunkPosIter<V, S, M>;
 
     fn pos_iter(self, lower_bound: Vec3<i32>, upper_bound: Vec3<i32>) -> Self::IntoIter {
@@ -452,7 +461,7 @@ impl<'a, V, S: VolSize, M> IntoPosIterator for &'a Chunk<V, S, M> {
     }
 }
 
-impl<'a, V, S: VolSize, M> IntoVolIterator<'a> for &'a Chunk<V, S, M> {
+impl<'a, V, S: core::ops::DerefMut<Target=Vec<V>> + VolSize<V>, M> IntoVolIterator<'a> for &'a Chunk<V, S, M> {
     type IntoIter = ChunkVolIter<'a, V, S, M>;
 
     fn vol_iter(self, lower_bound: Vec3<i32>, upper_bound: Vec3<i32>) -> Self::IntoIter {
@@ -310,6 +310,19 @@ fn dungeon(c: &mut Criterion) {
         });
     });
 
+    c.bench_function("deserialize_chunk", |b| {
+        // let chunk_pos = (world.sim().map_size_lg().chunks() >> 1).as_();
+        // let chunk_pos = Vec2::new(9500 / 32, 29042 / 32);
+        // let chunk_pos = Vec2::new(26944 / 32, 26848 / 32);
+        let chunk_pos = Vec2::new(842, 839);
+        let chunk = world.generate_chunk(index.as_index_ref(), chunk_pos, || false, None).unwrap().0;
+        let serialized = bincode::serialize(&chunk).unwrap();
+        // let chunk_pos = Vec2::new(24507/32, 20682/32);
+        // let chunk_pos = Vec2::new(19638/32, 19621/32);
+        b.iter(|| {
+            black_box(bincode::deserialize::<TerrainChunk>(&serialized).unwrap());
+        });
+    });
+
     /* c.bench_function("generate_dungeon", |b| {
         let mut rng = rand::rngs::StdRng::from_seed(seed);
@@ -78,8 +78,8 @@ fn do_deflate_flate2<const LEVEL: u32>(data: &[u8]) -> Vec<u8> {
     encoder.finish().expect("Failed to finish compression!")
 }
 
-fn chonk_to_dyna<V: Clone, S: RectVolSize, M: Clone, A: Access>(
-    chonk: &Chonk<V, S, M>,
+fn chonk_to_dyna<V: Clone, Storage, S: RectVolSize, M: Clone, A: Access>(
+    chonk: &Chonk<V, Storage, S, M>,
     block: V,
 ) -> Dyna<V, M, A> {
     let mut dyna = Dyna::<V, M, A>::filled(