diff --git a/common/src/terrain/chonk.rs b/common/src/terrain/chonk.rs
index e943df42ca..18f86d0281 100644
--- a/common/src/terrain/chonk.rs
+++ b/common/src/terrain/chonk.rs
@@ -94,6 +94,87 @@ impl>, S: RectVolSize, M: Clone> C
         self.z_offset + (self.sub_chunks.len() as u32 * SubChunkSize::::SIZE.z) as i32
     }
 
+    /// Flattened version of this chonk.
+    ///
+    /// It's not actually flat, it just skips the indirection through the index. The idea is to
+    /// use a constant stride for row access so the prefetcher can process it more easily.
+    pub fn make_flat<'a>(&'a self, below_slice: &'a [V], above_slice: &'a [V]) -> Vec<&'a [V]>
+    where
+        V: Copy + Eq,
+        [(); SubChunk::::GROUP_VOLUME as usize]:,
+    {
+        let mut flat = Vec::with_capacity(self.sub_chunks.len() * /*SubChunkSize::::SIZE.z as usize **/
+            /* SubChunk::::VOLUME as usize */
+            SubChunk::::GROUP_COUNT_TOTAL as usize);
+        self.sub_chunks.iter().enumerate().for_each(|(idx, sub_chunk)| {
+            let slice = if sub_chunk.default() == &self.below {
+                below_slice
+            } else {
+                above_slice
+            };
+            sub_chunk.push_flat(&mut flat, slice);
+        });
+        flat
+    }
+
+    #[inline]
+    /// Approximate max z.
+    ///
+    /// NOTE: Column must be in range; results are undefined otherwise.
+    // #[allow(unsafe_code)]
+    pub fn get_max_z_col(&self, col: Vec2) -> i32
+    where V: Eq,
+    {
+        self.get_max_z()
+        /* let group_size = SubChunk::::GROUP_SIZE;
+        let group_count = SubChunk::::GROUP_COUNT;
+        let col = (col.as_::() % Self::RECT_SIZE);
+        // FIXME: Make abstract.
+        let grp_pos = col.map2(group_size.xy(), |e, s| e / s);
+        let grp_idx_2d = grp_pos.x * (group_count.y * group_count.z) +
+            (grp_pos.y * group_count.z);
+        /* dbg!(col, group_size, group_count, grp_pos, grp_idx_2d); */
+
+        /* let grp_idx: [u8; SubChunk::GROUP_SIZE.z] =
+            [grp_idx_2d, grp_idx_2d + 1, grp_idx_2d + 2, grp_idx_2d + 3]; */
+        // let grp_idx = Chunk::grp_idx(col.with_z(0));
+        let grp_idx_2d = grp_idx_2d as u8;
+        let grp_idx0 = grp_idx_2d as usize;
+        let grp_idx1 = (grp_idx_2d + 1) as usize;
+        let grp_idx2 = (grp_idx_2d + 2) as usize;
+        let grp_idx3 = (grp_idx_2d + 3) as usize;
+        // Find first subchunk with either a different default from our above, or whose group at
+        // the relevant index is not the default.
+        let group_offset_z = self.sub_chunks.iter().enumerate().rev().find_map(|(sub_chunk_idx, sub_chunk)| {
+            if sub_chunk.default() != &self.above {
+                return Some((sub_chunk_idx + 1) * 4);
+            }
+            let num_groups = sub_chunk.num_groups() as u8;
+            let indices = /*&*/sub_chunk.indices()/*[0..256]*/;
+            unsafe {
+                let idx0 = *indices.get_unchecked(grp_idx0);
+                let idx1 = *indices.get_unchecked(grp_idx2);
+                let idx2 = *indices.get_unchecked(grp_idx1);
+                let idx3 = *indices.get_unchecked(grp_idx3);
+                if idx3 >= num_groups {
+                    return Some(sub_chunk_idx * 4 + grp_idx3);
+                }
+                if idx2 >= num_groups {
+                    return Some(sub_chunk_idx * 4 + grp_idx2);
+                }
+                if idx1 >= num_groups {
+                    return Some(sub_chunk_idx * 4 + grp_idx1);
+                }
+                if idx0 >= num_groups {
+                    return Some(sub_chunk_idx * 4 + grp_idx0);
+                }
+            }
+            return None;
+        }).unwrap_or(0);
+        let offset: u32 = group_offset_z as u32 * SubChunk::::GROUP_SIZE.z;
+        self.get_min_z() + offset as i32 */
+    }
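A minimal sketch of how the flattened view is meant to be consumed, mirroring the access pattern this patch adds to world/src/lib.rs further down. The `air`/`stone` fill values, the coordinate convention, and the `TerrainSubChunk` alias are assumptions for illustration, not part of this hunk:

    // Hypothetical usage. One shared slice per default value, so groups that only hold
    // the below/above filler can be read through the same path as allocated groups.
    let air_slice = [air; TerrainSubChunk::GROUP_VOLUME as usize];
    let stone_slice = [stone; TerrainSubChunk::GROUP_VOLUME as usize];
    let flat = chunk.make_flat(&stone_slice, &air_slice);

    // Read one voxel without Chonk's per-access index lookup; `col` is chunk-local x/y
    // and `z` is measured from get_min_z(), scanning down from get_max_z_col(col).
    let rel = col.with_z(z - chunk.get_min_z());
    let block = flat[TerrainSubChunk::grp_idx(rel) as usize][TerrainSubChunk::rel_idx(rel) as usize];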
+
     pub fn sub_chunks_len(&self) -> usize { self.sub_chunks.len() }
 
     pub fn sub_chunk_groups(&self) -> usize {
diff --git a/common/src/volumes/chunk.rs b/common/src/volumes/chunk.rs
index 82763ad67c..57446cdc6e 100644
--- a/common/src/volumes/chunk.rs
+++ b/common/src/volumes/chunk.rs
@@ -67,15 +67,15 @@ impl, M> Chunk {
         S::SIZE.z / Self::GROUP_SIZE.z,
     );
     /// `GROUP_COUNT_TOTAL` is always `256`, except if `VOLUME < 256`
-    const GROUP_COUNT_TOTAL: u32 = Self::VOLUME / Self::GROUP_VOLUME;
+    pub const GROUP_COUNT_TOTAL: u32 = Self::VOLUME / Self::GROUP_VOLUME;
     const GROUP_LONG_SIDE_LEN: u32 = 1 << ((Self::GROUP_VOLUME * 4 - 1).count_ones() / 3);
-    const GROUP_SIZE: Vec3 = Vec3::new(
+    pub const GROUP_SIZE: Vec3 = Vec3::new(
         Self::GROUP_LONG_SIDE_LEN,
         Self::GROUP_LONG_SIDE_LEN,
         Self::GROUP_VOLUME / (Self::GROUP_LONG_SIDE_LEN * Self::GROUP_LONG_SIDE_LEN),
     );
-    const GROUP_VOLUME: u32 = [Self::VOLUME / 256, 1][(Self::VOLUME < 256) as usize];
-    const VOLUME: u32 = (S::SIZE.x * S::SIZE.y * S::SIZE.z) as u32;
+    pub const GROUP_VOLUME: u32 = [Self::VOLUME / 256, 1][(Self::VOLUME < 256) as usize];
+    pub const VOLUME: u32 = (S::SIZE.x * S::SIZE.y * S::SIZE.z) as u32;
 }
 
 impl> + VolSize, M> Chunk {
@@ -128,6 +128,72 @@ impl> + VolSize, M> Chunk {
         &self.vox
     }
 
+    pub fn default(&self) -> &V {
+        &self.default
+    }
+
+    pub fn indices(&self) -> &[u8] {
+        &self.indices
+    }
+
+    /// Flattened version of this subchunk.
+    ///
+    /// It's not actually flat, it just skips the indirection through the index. The idea is to
+    /// use a constant stride for row access so the prefetcher can process it more easily.
+    pub fn push_flat<'a>(&'a self, flat: &mut Vec<&'a [V]>, default: &'a [V])
+    where
+        V: Copy,
+        [(); Self::GROUP_VOLUME as usize]:
+    {
+        let vox = &self.vox;
+        // let default = &[self.default; Self::GROUP_VOLUME as usize];
+        self.indices
+            .iter()
+            .enumerate()
+            .for_each(|(grp_idx, &base)| {
+                let start = usize::from(base) * Self::GROUP_VOLUME as usize;
+                let end = start + Self::GROUP_VOLUME as usize;
+                if let Some(group) = vox.get(start..end) {
+                    flat.push(group);
+                    /* flat.extend_from_slice(group); */
+                    /* flat[grp_idx / 64 * 4096 + grp_idx % 64 / 8 * 8 + grp_idx % 8..]copy_from_slice(group[0 * 64..1 * 64]);
+                    // 3*4096+(8*7+8)*16
+                    // 3*4096+7*128+7*4
+                    // (3*1024+7*32+7)*4
+                    // (3*1024+7*32+7)*4 + 1024
+                    // (3*1024+7*32+7)*4 + 1024 * 2
+                    // (3*1024+7*32+7)*4 + 1024 * 3
+                    //
+                    // 1024*15 + 7*4*32 + 7*4
+                    // 1024*15 + (7*4+1)*32 + 7*4
+                    // 1024*15 + (7*4+3)*32 + 7*4
+                    // 1024*15 + (7*4+3)*32 + 7*4
+                    flat[grp_idx * Self::GROUP_VOLUME..end].copy_from_slice(group[1 * 64..2 * 64]);
+                    flat[grp_idx * Self::GROUP_VOLUME..end].copy_from_slice(group[2 * 64..3 * 64]);
+                    flat[grp_idx * Self::GROUP_VOLUME..end].copy_from_slice(group[3 * 64..4 * 64]);
+                    flat[flat + base]
+                    // Check to see if all blocks in this group are the same.
+                    // NOTE: First element must exist because GROUP_VOLUME ≥ 1
+                    let first = &group[0];
+                    let first_ = first.as_bytes();
+                    // View group as bytes to benefit from specialization on [u8].
+                    let group = group.as_bytes();
+                    /* let mut group = group.iter();
+                    let first = group.next().expect("group_volume ≥ 1"); */
+                    if group.array_chunks::<{ core::mem::size_of::() }>().all(|block| block == first_) {
+                        // all blocks in the group were the same, so add our position to this entry
+                        // in the hashmap.
+                        map.entry(first).or_insert_with(/*vec![]*//*bitvec::bitarr![0; chunk::group_count_total]*/|| empty_bits)./*push*/set(grp_idx, true);
+                    } */
+                } else {
+                    // this slot is empty (i.e. has the default value).
+                    flat./*extend_from_slice*/push(default);
+                    /* map.entry(default).or_insert_with(|| empty_bits)./*push*/set(grp_idx, true);
+                    */
+                }
+            });
+    }
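Put differently, one `push_flat` call appends one entry per group slot to `flat`, and any slot whose index points past the allocated groups ends up sharing the caller-supplied `default` slice. A hedged sketch of that contract, assuming the index table has `GROUP_COUNT_TOTAL` entries and the caller passes a `default` of length `GROUP_VOLUME`, as `make_flat` does:

    // Hypothetical postcondition check, not part of the patch.
    let before = flat.len();
    sub_chunk.push_flat(&mut flat, &default_slice);
    assert_eq!(flat.len(), before + TerrainSubChunk::GROUP_COUNT_TOTAL as usize);
    assert!(flat[before..].iter().all(|grp| grp.len() == TerrainSubChunk::GROUP_VOLUME as usize));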
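And for a concrete sense of the constants the first hunk of this file makes public, a worked example under the assumption of a hypothetical 32×32×32 subchunk (the formulas are the ones above; the size is only for illustration):

    VOLUME              = 32 * 32 * 32                       = 32768
    GROUP_VOLUME        = VOLUME / 256                       = 128
    GROUP_LONG_SIDE_LEN = 1 << (count_ones(128 * 4 - 1) / 3) = 1 << (9 / 3) = 8
    GROUP_SIZE          = (8, 8, 128 / (8 * 8))              = (8, 8, 2)
    GROUP_COUNT         = (32 / 8, 32 / 8, 32 / 2)           = (4, 4, 16)
    GROUP_COUNT_TOTAL   = 32768 / 128                        = 256

so each group is an 8×8×2 brick and a subchunk has 256 group slots, consistent with the `&[u8]` index table exposed by `indices()` above.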
+
     /// Compress this subchunk by frequency.
     pub fn defragment(&mut self)
     where
@@ -230,6 +296,7 @@ impl> + VolSize, M> Chunk {
     /// Get a mutable reference to the internal metadata.
     pub fn metadata_mut(&mut self) -> &mut M { &mut self.meta }
 
+    #[inline(always)]
     pub fn num_groups(&self) -> usize { self.vox.len() / Self::GROUP_VOLUME as usize }
 
     /// Returns `Some(v)` if the block is homogeneous and contains nothing but
@@ -245,19 +312,19 @@ impl> + VolSize, M> Chunk {
     }
 
     #[inline(always)]
-    fn grp_idx(pos: Vec3) -> u32 {
+    pub fn grp_idx(pos: Vec3) -> u32 {
         let grp_pos = pos.map2(Self::GROUP_SIZE, |e, s| e as u32 / s);
-        (grp_pos.x * (Self::GROUP_COUNT.y * Self::GROUP_COUNT.z))
-            + (grp_pos.y * Self::GROUP_COUNT.z)
-            + (grp_pos.z)
+        (grp_pos.z * (Self::GROUP_COUNT.y * Self::GROUP_COUNT.x))
+            + (grp_pos.y * Self::GROUP_COUNT.x)
+            + (grp_pos.x)
     }
 
     #[inline(always)]
-    fn rel_idx(pos: Vec3) -> u32 {
+    pub fn rel_idx(pos: Vec3) -> u32 {
         let rel_pos = pos.map2(Self::GROUP_SIZE, |e, s| e as u32 % s);
-        (rel_pos.x * (Self::GROUP_SIZE.y * Self::GROUP_SIZE.z))
-            + (rel_pos.y * Self::GROUP_SIZE.z)
-            + (rel_pos.z)
+        (rel_pos.z * (Self::GROUP_SIZE.y * Self::GROUP_SIZE.x))
+            + (rel_pos.y * Self::GROUP_SIZE.x)
+            + (rel_pos.x)
     }
 
     #[inline(always)]
@@ -384,7 +451,7 @@ impl, M> ChunkPosIter {
 impl, M> Iterator for ChunkPosIter {
     type Item = Vec3;
 
-    #[inline(always)]
+    /* #[inline(always)]
     fn next(&mut self) -> Option {
         if self.pos.x >= self.ub.x {
             return None;
@@ -433,9 +500,9 @@ impl, M> Iterator for ChunkPosIter {
         self.pos.x = (self.pos.x | (Chunk::::GROUP_SIZE.x as i32 - 1)) + 1;
 
         res
-    }
+    } */
 
-    /* fn next(&mut self) -> Option {
+    fn next(&mut self) -> Option {
         if self.pos.z >= self.ub.z {
             return None;
         }
@@ -483,7 +550,7 @@ impl, M> Iterator for ChunkPosIter {
         self.pos.z = (self.pos.z | (Chunk::::GROUP_SIZE.z as i32 - 1)) + 1;
 
         res
-    } */
+    }
 }
 
 pub struct ChunkVolIter<'a, V, S: VolSize, M> {
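Net effect of the chunk.rs changes above: the group and in-group linearizations flip from x-major to z-major (x now varies fastest), and the previously commented-out `next()` with z as the outermost axis becomes the live one, so whole-chunk iteration now touches `vox` in roughly ascending memory order. A restatement of the new ordering, not part of the patch:

    // Sketch: grp_idx/rel_idx are this linearization applied to pos / GROUP_SIZE and
    // pos % GROUP_SIZE respectively, with `size` standing in for GROUP_COUNT or GROUP_SIZE.
    use vek::Vec3;
    fn linearize(pos: Vec3<u32>, size: Vec3<u32>) -> u32 {
        (pos.z * size.y + pos.y) * size.x + pos.x
    }

Because z is now the most significant axis, `grp_idx` keeps increasing as a position's z climbs across group (and stacked subchunk) boundaries, which is what lets the snow pass added to world/src/lib.rs below walk a whole column of the flattened chonk with a single formula.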
diff --git a/world/benches/site2.rs b/world/benches/site2.rs
index 2f4619a881..879633227d 100644
--- a/world/benches/site2.rs
+++ b/world/benches/site2.rs
@@ -306,6 +306,7 @@ fn dungeon(c: &mut Criterion) {
             // let chunk_pos = Vec2::new(24507/32, 20682/32);
             // let chunk_pos = Vec2::new(19638/32, 19621/32);
             let chunk_pos = Vec2::new(21488/32, 13584/32);
+            // let chunk_pos = Vec2::new(21488/32 + 5, 13584/32 + 5);
             b.iter(|| {
                 black_box(world.generate_chunk(index.as_index_ref(), chunk_pos, || false, None));
             });
@@ -319,6 +320,7 @@ fn dungeon(c: &mut Criterion) {
             // let chunk_pos = Vec2::new(24507/32, 20682/32);
             // let chunk_pos = Vec2::new(19638/32, 19621/32);
             let chunk_pos = Vec2::new(21488/32, 13584/32);
+            // let chunk_pos = Vec2::new(21488/32 + 5, 13584/32 + 5);
             let chunk = world.generate_chunk(index.as_index_ref(), chunk_pos, || false, None).unwrap().0;
             /* println!("{:?}", chunk.sub_chunks_len());
             let chunk = chunk.sub_chunks().next().unwrap(); */
diff --git a/world/src/block/mod.rs b/world/src/block/mod.rs
index a49f91b173..06f4e84ae6 100644
--- a/world/src/block/mod.rs
+++ b/world/src/block/mod.rs
@@ -66,7 +66,7 @@ impl<'a> BlockGen<'a> {
             surface_color,
             sub_surface_color,
             stone_col,
-            snow_cover,
+            /* snow_cover, */
            cliff_offset,
             cliff_height,
             ice_depth,
@@ -131,10 +131,10 @@ impl<'a> BlockGen<'a> {
                     let col = Lerp::lerp(sub_surface_color, surface_color, grass_factor);
                     if grass_factor < 0.7 {
                         Block::new(BlockKind::Earth, col.map(|e| (e * 255.0) as u8))
-                    } else if snow_cover {
+                    }/* else if snow_cover {
                         //if temp < CONFIG.snow_temp + 0.031 {
                         Block::new(BlockKind::Snow, col.map(|e| (e * 255.0) as u8))
-                    } else {
+                    }*/ else {
                         Block::new(BlockKind::Grass, col.map(|e| (e * 255.0) as u8))
                     }
                 })
diff --git a/world/src/canvas.rs b/world/src/canvas.rs
index 582d24e0f5..dbd1787b35 100644
--- a/world/src/canvas.rs
+++ b/world/src/canvas.rs
@@ -237,7 +237,7 @@ impl<'a> Canvas<'a> {
             let mut above = true;
             for z in (structure.get_bounds().min.z..structure.get_bounds().max.z).rev() {
                 if let Ok(sblock) = structure.get(rpos2d.with_z(z)) {
-                    let mut add_snow = false;
+                    /* let mut add_snow = false; */
                     let _ = canvas.map(wpos2d.with_z(origin.z + z), |block| {
                         if let Some(new_block) = block_from_structure(
                             info.index,
@@ -250,9 +250,9 @@ impl<'a> Canvas<'a> {
                             info.calendar(),
                         ) {
                             if !new_block.is_air() {
-                                if with_snow && col.snow_cover && above {
+                                /* if with_snow && col.snow_cover && above {
                                     add_snow = true;
-                                }
+                                } */
                                 above = false;
                             }
                             new_block
@@ -261,12 +261,12 @@ impl<'a> Canvas<'a> {
                         }
                     });
 
-                    if add_snow {
+                    /* if add_snow {
                         let _ = canvas.set(
                             wpos2d.with_z(origin.z + z + 1),
                             Block::new(BlockKind::Snow, Rgb::new(210, 210, 255)),
                         );
-                    }
+                    } */
                 }
             }
         });
diff --git a/world/src/column/mod.rs b/world/src/column/mod.rs
index 63d3170fd9..34910fe022 100644
--- a/world/src/column/mod.rs
+++ b/world/src/column/mod.rs
@@ -1488,7 +1488,7 @@ impl<'a, 'b> Sampler<'a, 'b> for ColumnGen1D<'a, 'b> {
         let warm_stone: Rgb = warm_stone.into();
         let beach_sand = beach_sand.into();
         let desert_sand = desert_sand.into();
-        let snow = snow.into();
+        let snow: Rgb = snow.into();
         let stone_col = stone_col.into();
         let dirt_low: Rgb = dirt_low.into();
         let dirt_high = dirt_high.into();
@@ -1686,18 +1686,18 @@ impl<'a, 'b> Sampler<'a, 'b> for ColumnGen1D<'a, 'b> {
         let warp = riverless_alt_delta + warp;
         let alt = alt + warp;
 
-        let (snow_delta, ground, sub_surface_color) = if snow_cover && alt > water_level {
+        /* let (/*snow_delta, *//*ground, */sub_surface_color,) = if snow_cover && alt > water_level {
             // Allow snow cover.
             (
-                1.0 - snow_factor.max(0.0),
-                Rgb::lerp(snow, ground, snow_factor),
+                /* 1.0 - snow_factor.max(0.0),*/
+                /* Rgb::lerp(snow, ground, snow_factor), */
                 Lerp::lerp(sub_surface_color, ground, basement_sub_alt.mul(-0.15)),
             )
         } else {
-            (0.0, ground, sub_surface_color)
-        };
-        let alt = alt + snow_delta;
-        let basement_sub_alt = basement_sub_alt - snow_delta;
+            (/*0.0, *//*ground, */sub_surface_color,)
+        }; */
+        /* let alt = alt + snow_delta;
+        let basement_sub_alt = basement_sub_alt - snow_delta; */
 
         // Make river banks not have grass
         let ground = water_dist
diff --git a/world/src/layer/rock.rs b/world/src/layer/rock.rs
index 99623e1ca8..333b1c8d7b 100644
--- a/world/src/layer/rock.rs
+++ b/world/src/layer/rock.rs
@@ -91,7 +91,7 @@ pub fn apply_rocks_to(canvas: &mut Canvas, _dynamic_rng: &mut impl Rng) {
                     continue;
                 }
 
-                let mut is_top = true;
+                /* let mut is_top = true; */
                 let mut last_block = Block::empty();
                 for z in (bounds.min.z..bounds.max.z).rev() {
                     let wpos = Vec3::new(wpos2d.x, wpos2d.y, rock.wpos.z + z);
@@ -104,14 +104,14 @@ pub fn apply_rocks_to(canvas: &mut Canvas, _dynamic_rng: &mut impl Rng) {
                     rock.kind
                         .take_sample(model_pos, rock.seed, last_block, col)
                         .map(|block| {
-                            if col.snow_cover && is_top && block.is_filled() {
+                            /* if col.snow_cover && is_top && block.is_filled() {
                                 canvas.set(
                                     wpos + Vec3::unit_z(),
                                     Block::new(BlockKind::Snow, Rgb::new(210, 210, 255)),
                                 );
-                            }
+                            } */
                             canvas.set(wpos, block);
-                            is_top = false;
+                            /* is_top = false; */
                             last_block = block;
                         });
                 }
diff --git a/world/src/lib.rs b/world/src/lib.rs
index 6bad1bfefb..62a60e2a8c 100644
--- a/world/src/lib.rs
+++ b/world/src/lib.rs
@@ -63,7 +63,7 @@ use common::{
     terrain::{
         Block, BlockKind, SpriteKind, TerrainChunk, TerrainChunkMeta, TerrainChunkSize, TerrainGrid,
     },
-    vol::{ReadVol, RectVolSize, WriteVol},
+    vol::{IntoPosIterator, ReadVol, RectVolSize, WriteVol},
 };
 use common_net::msg::{world_msg, WorldMapMsg};
 use rand::{prelude::*, Rng};
@@ -343,6 +343,7 @@ impl World {
         let mut delta2 = 0;
         let mut delta3 = 0;
         let mut delta4 = 0;
+        let mut has_snow = false;
         for y in 0..TerrainChunkSize::RECT_SIZE.y as i32 {
             for x in 0..TerrainChunkSize::RECT_SIZE.x as i32 {
                 if should_continue() {
@@ -358,6 +359,8 @@ impl World {
                     _ => continue,
                 };
 
+                has_snow |= z_cache.sample.snow_cover;
+
                 // dbg!(chunk_pos, x, y, z_cache.get_z_limits());
                 let (min_z, max_z) = z_cache.get_z_limits();
                 /* let max_z = min_z + 1.0;
@@ -539,6 +542,66 @@ impl World {
             )
         });
 
+        // Apply snow cover.
+        if has_snow {
+            let snow = Block::new(BlockKind::Snow, Rgb::new(210, 210, 255));
+            // NOTE: We assume throughout Veloren that u32 fits in usize (we need to make this a static
+            // assertion). RECT_SIZE.product() is statically valid.
+            let mut snow_blocks = Vec::with_capacity(TerrainChunkSize::RECT_SIZE.product() as usize * 3);
+            let air_slice = [air; common::terrain::TerrainSubChunk::GROUP_VOLUME as usize];
+            let stone_slice = [stone; common::terrain::TerrainSubChunk::GROUP_VOLUME as usize];
+            let flat = chunk.make_flat(&stone_slice, &air_slice);
+            zcache_grid.iter()
+                .filter(|(_, col_sample)| col_sample.snow_cover)
+                .for_each(|(wpos_delta, col_sample)| {
+                    let wpos2d = /*chunk_wpos2d + */wpos_delta;
+                    let iter = /*chunk.pos_iter(wpos2d.with_z(chunk.get_min_z()), wpos2d.with_z(chunk.get_max_z()))*/
+                        (0..chunk.get_max_z_col(wpos_delta) - chunk.get_min_z());
+                    // dbg!(wpos_delta, &iter);
+                    let mut above_block_is_air = true;
+                    for z in iter.rev() {
+                        let mut pos = wpos2d.with_z(z);
+                        let grp_id = common::terrain::TerrainSubChunk::grp_idx(pos) as usize;
+                        let rel_id = common::terrain::TerrainSubChunk::rel_idx(pos) as usize;
+                        let block = flat[grp_id][rel_id];
+                        let kind = block.kind();
+                        // dbg!(pos, block, above_block_is_air, kind.is_filled());
+                        if above_block_is_air && kind.is_filled() {
+                            // Place a block above this one.
+                            pos.z += chunk.get_min_z() + 1;
+                            snow_blocks.push(pos);
+                        }
+                        let is_air = kind.is_air();
+                        above_block_is_air = is_air;
+                        if !(is_air || kind == BlockKind::Leaves) {
+                            break;
+                        }
+                    }
+                    /* for z in iter.rev() {
+                        let mut pos = wpos2d.with_z(z);
+                        let grp_id = common::terrain::TerrainSubChunk::grp_idx(pos) as usize;
+                        let rel_id = common::terrain::TerrainSubChunk::rel_idx(pos) as usize;
+                        let block = flat[grp_id][rel_id];
+                        let kind = block.kind();
+                        // dbg!(pos, block, above_block_is_air, kind.is_filled());
+                        if kind.is_filled() {
+                            // Place a block above this one.
+                            pos.z += chunk.get_min_z() + 1;
+                            snow_blocks.push(pos);
+                            break;
+                        }
+                        let is_air = kind.is_air();
+                        if !is_air {
+                            break;
+                        }
+                    } */
+                });
+
+            snow_blocks.into_iter().for_each(|pos| {
+                let _ = chunk.set(pos, snow);
+            });
+        }
+
         // Finally, defragment to minimize space consumption.
         chunk.defragment();
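The snow pass above only works if the flattened view agrees with ordinary volume access. A hedged sanity check one could temporarily drop in after `make_flat` (it assumes `Block: PartialEq` and that `Chonk::get` uses the same coordinate convention as the `chunk.set` calls above; `flat`, `chunk` and `TerrainChunkSize` as in the hunk):

    // Hypothetical debug check, not part of the patch: every voxel read through the
    // flat view must match the one returned by ReadVol::get.
    for z in chunk.get_min_z()..chunk.get_max_z() {
        for y in 0..TerrainChunkSize::RECT_SIZE.y as i32 {
            for x in 0..TerrainChunkSize::RECT_SIZE.x as i32 {
                let rel = Vec3::new(x, y, z - chunk.get_min_z());
                let grp_id = common::terrain::TerrainSubChunk::grp_idx(rel) as usize;
                let rel_id = common::terrain::TerrainSubChunk::rel_idx(rel) as usize;
                debug_assert_eq!(Some(&flat[grp_id][rel_id]), chunk.get(Vec3::new(x, y, z)).ok());
            }
        }
    }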