Mirror of https://gitlab.com/veloren/veloren.git (synced 2024-08-30 18:12:32 +00:00)
Add a bandwidth-based heuristic for chunk compression.
parent cdc2eccda8, commit 30cae40b82

Changed paths: common/net, server/src/sys, world/src/bin
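The heuristic in brief: the server compares the connection's measured bandwidth against TERRAIN_LOW_BANDWIDTH and only takes the slower-to-encode but smaller PNG-based path for low-bandwidth clients whose chunk has a small z extent; everyone else gets the cheap deflate path. Below is a minimal standalone sketch of that decision, not the actual Veloren code: Encoding, ChunkStats, choose_encoding, and bandwidth_bytes_per_sec are invented stand-ins, while the threshold and the 128-block cutoff come from the diff.

/// Bytes-per-second threshold below which the server spends extra CPU on a
/// smaller terrain encoding (value taken from the diff).
const TERRAIN_LOW_BANDWIDTH: f32 = 5_000_000.0;

#[derive(Debug, PartialEq)]
enum Encoding {
    /// Cheap to produce, larger on the wire.
    Deflate,
    /// PNG-based encoding: slower to produce, smaller on the wire.
    QuadPng,
}

struct ChunkStats {
    min_z: i32,
    max_z: i32,
}

/// Mirrors the shape of SerializedTerrainChunk::via_heuristic: only short
/// chunks sent to low-bandwidth clients get the more expensive PNG path.
fn choose_encoding(chunk: &ChunkStats, bandwidth_bytes_per_sec: f32) -> Encoding {
    let low_bandwidth = bandwidth_bytes_per_sec < TERRAIN_LOW_BANDWIDTH;
    if low_bandwidth && (chunk.max_z - chunk.min_z <= 128) {
        Encoding::QuadPng
    } else {
        Encoding::Deflate
    }
}

fn main() {
    let shallow = ChunkStats { min_z: 0, max_z: 100 };
    let tall = ChunkStats { min_z: 0, max_z: 400 };
    // A 2 MB/s client gets the compact encoding for shallow chunks only.
    assert_eq!(choose_encoding(&shallow, 2_000_000.0), Encoding::QuadPng);
    assert_eq!(choose_encoding(&tall, 2_000_000.0), Encoding::Deflate);
    // A fast client always gets the cheap deflate path.
    assert_eq!(choose_encoding(&shallow, 50_000_000.0), Encoding::Deflate);
    println!("heuristic behaves as expected");
}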
@@ -21,7 +21,6 @@ num-traits = "0.2"
 sum_type = "0.2.0"
 vek = { version = "=0.14.1", features = ["serde"] }
 tracing = { version = "0.1", default-features = false }
-inline_tweak = "1.0.2"
 
 # Data structures
 hashbrown = { version = "0.9", features = ["rayon", "serde", "nightly"] }
@@ -12,7 +12,7 @@ use std::{
     io::{Read, Write},
     marker::PhantomData,
 };
-use tracing::{trace, warn};
+use tracing::warn;
 use vek::*;
 
 /// Wrapper for compressed, serialized data (for stuff that doesn't use the
@@ -37,12 +37,6 @@ impl<T: Serialize> CompressedData<T> {
         let mut encoder = DeflateEncoder::new(Vec::new(), Compression::new(level));
         encoder.write_all(&*uncompressed).expect(EXPECT_MSG);
         let compressed = encoder.finish().expect(EXPECT_MSG);
-        trace!(
-            "compressed {}, uncompressed {}, ratio {}",
-            compressed.len(),
-            uncompressed.len(),
-            compressed.len() as f32 / uncompressed.len() as f32
-        );
         CompressedData {
             data: compressed,
             compressed: true,
@@ -106,6 +100,10 @@ impl PackingFormula for TallPacking {
     }
 }
 
+/// A wide, short image. Shares the advantage of not wasting space with
+/// TallPacking, but faster to compress and smaller since PNG compresses each
+/// row indepedently, so a wide image has fewer calls to the compressor. FLIP_X
+/// has the same spatial locality preserving behavior as with TallPacking.
 #[derive(Debug, Clone, Copy, Serialize, Deserialize)]
 pub struct WidePacking<const FLIP_X: bool>();
 
@@ -602,9 +600,9 @@ impl<const N: u32> VoxelImageDecoding for QuadPngEncoding<N> {
 }
 
 #[derive(Debug, Clone, Copy, Serialize, Deserialize)]
-pub struct TriPngEncoding;
+pub struct TriPngEncoding<const AVERAGE_PALETTE: bool>();
 
-impl VoxelImageEncoding for TriPngEncoding {
+impl<const AVERAGE_PALETTE: bool> VoxelImageEncoding for TriPngEncoding<AVERAGE_PALETTE> {
     #[allow(clippy::type_complexity)]
     type Output = CompressedData<(Vec<u8>, Vec<Rgb<u8>>, [usize; 3])>;
     #[allow(clippy::type_complexity)]
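The hunk above turns TriPngEncoding into a const-generic type: the AVERAGE_PALETTE flag selects between building an averaged palette from the chunk's blocks and reconstructing colours from a constant per-BlockKind table, and the selection happens at compile time. A small sketch of that pattern follows; TriPngLike and describe are invented names for illustration, not the real API.

// A bool const parameter lets the unused branch be compiled out entirely.
struct TriPngLike<const AVERAGE_PALETTE: bool>();

impl<const AVERAGE_PALETTE: bool> TriPngLike<AVERAGE_PALETTE> {
    fn describe() -> &'static str {
        if AVERAGE_PALETTE {
            "store an averaged per-block palette alongside the image"
        } else {
            "reconstruct colours from a constant per-BlockKind table"
        }
    }
}

fn main() {
    // TriPngEncoding::<true>() corresponds to the "tripngaverage" benchmark
    // below, TriPngEncoding::<false>() to "tripngconst" and to the variant
    // actually sent on the wire (TriPng(WireChonk<TriPngEncoding<false>, ...>)).
    println!("<true>:  {}", TriPngLike::<true>::describe());
    println!("<false>: {}", TriPngLike::<false>::describe());
}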
@@ -628,7 +626,9 @@ impl VoxelImageEncoding for TriPngEncoding {
         ws.0.put_pixel(x, y, image::Luma([kind as u8]));
         ws.1.put_pixel(x, y, image::Luma([0]));
         ws.2.put_pixel(x, y, image::Luma([0]));
-        *ws.3.entry(kind).or_default().entry(rgb).or_insert(0) += 1;
+        if AVERAGE_PALETTE {
+            *ws.3.entry(kind).or_default().entry(rgb).or_insert(0) += 1;
+        }
     }
 
     fn put_sprite(
@@ -663,29 +663,34 @@ impl VoxelImageEncoding for TriPngEncoding {
         f(&ws.1, 1)?;
         f(&ws.2, 2)?;
 
-        let mut palette = vec![Rgb { r: 0, g: 0, b: 0 }; 256];
-        for (block, hist) in ws.3.iter() {
-            let (mut r, mut g, mut b) = (0.0, 0.0, 0.0);
-            let mut total = 0;
-            for (color, count) in hist.iter() {
-                r += color.r as f64 * *count as f64;
-                g += color.g as f64 * *count as f64;
-                b += color.b as f64 * *count as f64;
-                total += *count;
-            }
-            r /= total as f64;
-            g /= total as f64;
-            b /= total as f64;
-            palette[*block as u8 as usize].r = r as u8;
-            palette[*block as u8 as usize].g = g as u8;
-            palette[*block as u8 as usize].b = b as u8;
-        }
+        let palette = if AVERAGE_PALETTE {
+            let mut palette = vec![Rgb { r: 0, g: 0, b: 0 }; 256];
+            for (block, hist) in ws.3.iter() {
+                let (mut r, mut g, mut b) = (0.0, 0.0, 0.0);
+                let mut total = 0;
+                for (color, count) in hist.iter() {
+                    r += color.r as f64 * *count as f64;
+                    g += color.g as f64 * *count as f64;
+                    b += color.b as f64 * *count as f64;
+                    total += *count;
+                }
+                r /= total as f64;
+                g /= total as f64;
+                b /= total as f64;
+                palette[*block as u8 as usize].r = r as u8;
+                palette[*block as u8 as usize].g = g as u8;
+                palette[*block as u8 as usize].b = b as u8;
+            }
+            palette
+        } else {
+            Vec::new()
+        };
 
         Some(CompressedData::compress(&(buf, palette, indices), 4))
     }
 }
 
-impl VoxelImageDecoding for TriPngEncoding {
+impl<const AVERAGE_PALETTE: bool> VoxelImageDecoding for TriPngEncoding<AVERAGE_PALETTE> {
     fn start(data: &Self::Output) -> Option<Self::Workspace> {
         use image::codecs::png::PngDecoder;
         let (quad, palette, indices) = data.decompress()?;
@@ -698,12 +703,14 @@ impl VoxelImageDecoding for TriPngEncoding {
         let b = image_from_bytes(PngDecoder::new(&quad[ranges[1].clone()]).ok()?)?;
         let c = image_from_bytes(PngDecoder::new(&quad[ranges[2].clone()]).ok()?)?;
         let mut d: HashMap<_, HashMap<_, _>> = HashMap::new();
-        for i in 0..=255 {
-            if let Some(block) = BlockKind::from_u8(i) {
-                d.entry(block)
-                    .or_default()
-                    .entry(palette[i as usize])
-                    .insert(1);
-            }
-        }
+        if AVERAGE_PALETTE {
+            for i in 0..=255 {
+                if let Some(block) = BlockKind::from_u8(i) {
+                    d.entry(block)
+                        .or_default()
+                        .entry(palette[i as usize])
+                        .insert(1);
+                }
+            }
+        }
 
@@ -713,11 +720,62 @@ impl VoxelImageDecoding for TriPngEncoding {
     fn get_block(ws: &Self::Workspace, x: u32, y: u32, _: bool) -> Block {
         if let Some(kind) = BlockKind::from_u8(ws.0.get_pixel(x, y).0[0]) {
             if kind.is_filled() {
-                let rgb = *ws
-                    .3
-                    .get(&kind)
-                    .and_then(|h| h.keys().next())
-                    .unwrap_or(&Rgb::default());
+                let rgb = if AVERAGE_PALETTE {
+                    *ws.3
+                        .get(&kind)
+                        .and_then(|h| h.keys().next())
+                        .unwrap_or(&Rgb::default())
+                } else {
+                    use BlockKind::*;
+                    match kind {
+                        Air | Water => Rgb { r: 0, g: 0, b: 0 },
+                        Rock => Rgb {
+                            r: 93,
+                            g: 110,
+                            b: 145,
+                        },
+                        WeakRock => Rgb {
+                            r: 93,
+                            g: 132,
+                            b: 145,
+                        },
+                        Grass => Rgb {
+                            r: 51,
+                            g: 160,
+                            b: 94,
+                        },
+                        Snow => Rgb {
+                            r: 192,
+                            g: 255,
+                            b: 255,
+                        },
+                        Earth => Rgb {
+                            r: 200,
+                            g: 140,
+                            b: 93,
+                        },
+                        Sand => Rgb {
+                            r: 241,
+                            g: 177,
+                            b: 128,
+                        },
+                        Wood => Rgb {
+                            r: 128,
+                            g: 77,
+                            b: 51,
+                        },
+                        Leaves => Rgb {
+                            r: 93,
+                            g: 206,
+                            b: 64,
+                        },
+                        Misc => Rgb {
+                            r: 255,
+                            g: 0,
+                            b: 255,
+                        },
+                    }
+                };
                 Block::new(kind, rgb)
             } else {
                 let mut block = Block::new(kind, Rgb { r: 0, g: 0, b: 0 });
@@ -15,7 +15,7 @@ pub use self::{
     server::{
         CharacterInfo, DisconnectReason, InviteAnswer, Notification, PlayerInfo, PlayerListUpdate,
         RegisterError, SerializedTerrainChunk, ServerGeneral, ServerInfo, ServerInit, ServerMsg,
-        ServerRegisterAnswer,
+        ServerRegisterAnswer, TERRAIN_LOW_BANDWIDTH,
     },
     world_msg::WorldMapMsg,
 };
@@ -70,12 +70,16 @@ pub type ServerRegisterAnswer = Result<(), RegisterError>;
 pub enum SerializedTerrainChunk {
     DeflatedChonk(CompressedData<TerrainChunk>),
     QuadPng(WireChonk<QuadPngEncoding<4>, WidePacking<true>, TerrainChunkMeta, TerrainChunkSize>),
-    TriPng(WireChonk<TriPngEncoding, WidePacking<true>, TerrainChunkMeta, TerrainChunkSize>),
+    TriPng(WireChonk<TriPngEncoding<false>, WidePacking<true>, TerrainChunkMeta, TerrainChunkSize>),
 }
 
+/// If someone has less than this number of bytes per second of bandwidth, spend
+/// more CPU generating a smaller encoding of terrain data.
+pub const TERRAIN_LOW_BANDWIDTH: f32 = 5_000_000.0;
+
 impl SerializedTerrainChunk {
-    pub fn via_heuristic(chunk: &TerrainChunk) -> Self {
-        if chunk.get_max_z() - chunk.get_min_z() < 128 {
+    pub fn via_heuristic(chunk: &TerrainChunk, low_bandwidth: bool) -> Self {
+        if low_bandwidth && (chunk.get_max_z() - chunk.get_min_z() <= 128) {
             Self::quadpng(chunk)
         } else {
             Self::deflate(chunk)
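For a sense of scale, TERRAIN_LOW_BANDWIDTH is expressed in bytes per second; a quick unit check (plain Rust, just the arithmetic):

fn main() {
    const TERRAIN_LOW_BANDWIDTH: f32 = 5_000_000.0; // bytes/s, value from the diff
    let mbit_per_s = TERRAIN_LOW_BANDWIDTH * 8.0 / 1_000_000.0;
    println!("low-bandwidth threshold: {mbit_per_s} Mbit/s"); // prints 40 Mbit/s
}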
@@ -96,7 +100,7 @@ impl SerializedTerrainChunk {
     }
 
     pub fn tripng(chunk: &TerrainChunk) -> Self {
-        if let Some(wc) = WireChonk::from_chonk(TriPngEncoding, WidePacking(), chunk) {
+        if let Some(wc) = WireChonk::from_chonk(TriPngEncoding(), WidePacking(), chunk) {
             Self::TriPng(wc)
         } else {
             warn!("Image encoding failure occurred, falling back to deflate");
@@ -6,7 +6,9 @@ use common::{
     vol::RectVolSize,
 };
 use common_ecs::{Job, Origin, ParMode, Phase, System};
-use common_net::msg::{ClientGeneral, SerializedTerrainChunk, ServerGeneral};
+use common_net::msg::{
+    ClientGeneral, SerializedTerrainChunk, ServerGeneral, TERRAIN_LOW_BANDWIDTH,
+};
 use rayon::iter::ParallelIterator;
 use specs::{Entities, Join, ParJoin, Read, ReadExpect, ReadStorage};
 use tracing::{debug, trace};
@@ -77,12 +79,17 @@ impl<'a> System<'a> for Sys {
                 match terrain.get_key_arc(key) {
                     Some(chunk) => {
                         network_metrics.chunks_served_from_memory.inc();
-                        client.send(ServerGeneral::TerrainChunkUpdate {
-                            key,
-                            chunk: Ok(SerializedTerrainChunk::via_heuristic(
-                                &chunk,
-                            )),
-                        })?
+                        if let Some(participant) = &client.participant {
+                            let low_bandwidth =
+                                participant.bandwidth() < TERRAIN_LOW_BANDWIDTH;
+                            client.send(ServerGeneral::TerrainChunkUpdate {
+                                key,
+                                chunk: Ok(SerializedTerrainChunk::via_heuristic(
+                                    &chunk,
+                                    low_bandwidth,
+                                )),
+                            })?
+                        }
                     },
                     None => {
                         network_metrics.chunks_generation_triggered.inc();
@@ -14,7 +14,7 @@ use common::{
     LoadoutBuilder, SkillSetBuilder,
 };
 use common_ecs::{Job, Origin, Phase, System};
-use common_net::msg::{SerializedTerrainChunk, ServerGeneral};
+use common_net::msg::{SerializedTerrainChunk, ServerGeneral, TERRAIN_LOW_BANDWIDTH};
 use common_state::TerrainChanges;
 use comp::Behavior;
 use specs::{Join, Read, ReadStorage, Write, WriteExpect};
@@ -222,11 +222,8 @@ impl<'a> System<'a> for Sys {
         // Send the chunk to all nearby players.
         use rayon::iter::{IntoParallelIterator, ParallelIterator};
         new_chunks.into_par_iter().for_each(|(key, chunk)| {
-            let mut msg = Some(ServerGeneral::TerrainChunkUpdate {
-                key,
-                chunk: Ok(SerializedTerrainChunk::via_heuristic(&*chunk)),
-            });
-            let mut lazy_msg = None;
+            let mut lazy_msg_lo = None;
+            let mut lazy_msg_hi = None;
 
             (&presences, &positions, &clients)
                 .join()
@@ -240,11 +237,26 @@ impl<'a> System<'a> for Sys {
                         .magnitude_squared();
 
                     if adjusted_dist_sqr <= presence.view_distance.pow(2) {
-                        if let Some(msg) = msg.take() {
-                            lazy_msg = Some(client.prepare(msg));
-                        };
+                        if let Some(participant) = &client.participant {
+                            let low_bandwidth = participant.bandwidth() < TERRAIN_LOW_BANDWIDTH;
+                            let lazy_msg = if low_bandwidth {
+                                &mut lazy_msg_lo
+                            } else {
+                                &mut lazy_msg_hi
+                            };
+                            if lazy_msg.is_none() {
+                                *lazy_msg =
+                                    Some(client.prepare(ServerGeneral::TerrainChunkUpdate {
+                                        key,
+                                        chunk: Ok(SerializedTerrainChunk::via_heuristic(
+                                            &*chunk,
+                                            low_bandwidth,
+                                        )),
+                                    }));
+                            };
 
                             lazy_msg.as_ref().map(|msg| client.send_prepared(msg));
+                        }
                     }
                 });
         });
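The two hunks above replace the single pre-built message with one lazily prepared message per bandwidth class, so a chunk update is serialized at most twice (low- and high-bandwidth variants) no matter how many players are in range. A simplified sketch of that pattern follows; Client, PreparedMsg, prepare, send_prepared, and broadcast_chunk here are stand-ins for the real server types, not Veloren's API.

struct PreparedMsg(Vec<u8>);

struct Client {
    bandwidth: f32, // bytes per second, as reported for the connection
}

impl Client {
    fn prepare(&self, payload: &str) -> PreparedMsg {
        // In Veloren this serializes a ServerGeneral::TerrainChunkUpdate.
        PreparedMsg(payload.as_bytes().to_vec())
    }

    fn send_prepared(&self, _msg: &PreparedMsg) {}
}

const TERRAIN_LOW_BANDWIDTH: f32 = 5_000_000.0;

fn broadcast_chunk(clients: &[Client]) {
    let mut lazy_msg_lo: Option<PreparedMsg> = None;
    let mut lazy_msg_hi: Option<PreparedMsg> = None;

    for client in clients {
        let low_bandwidth = client.bandwidth < TERRAIN_LOW_BANDWIDTH;
        let lazy_msg = if low_bandwidth { &mut lazy_msg_lo } else { &mut lazy_msg_hi };
        // Serialize at most once per bandwidth class, then reuse the message.
        let msg = lazy_msg.get_or_insert_with(|| {
            client.prepare(if low_bandwidth { "quadpng chunk" } else { "deflate chunk" })
        });
        client.send_prepared(msg);
    }
}

fn main() {
    broadcast_chunk(&[
        Client { bandwidth: 1_000_000.0 },
        Client { bandwidth: 20_000_000.0 },
        Client { bandwidth: 2_000_000.0 },
    ]);
}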
@@ -1,7 +1,9 @@
 use crate::{client::Client, presence::Presence};
 use common::{comp::Pos, terrain::TerrainGrid};
 use common_ecs::{Job, Origin, Phase, System};
-use common_net::msg::{CompressedData, SerializedTerrainChunk, ServerGeneral};
+use common_net::msg::{
+    CompressedData, SerializedTerrainChunk, ServerGeneral, TERRAIN_LOW_BANDWIDTH,
+};
 use common_state::TerrainChanges;
 use specs::{Join, Read, ReadExpect, ReadStorage};
 
@@ -29,21 +31,36 @@ impl<'a> System<'a> for Sys {
     ) {
         // Sync changed chunks
         'chunk: for chunk_key in &terrain_changes.modified_chunks {
-            let mut lazy_msg = None;
+            let mut lazy_msg_hi = None;
+            let mut lazy_msg_lo = None;
 
             for (presence, pos, client) in (&presences, &positions, &clients).join() {
-                if super::terrain::chunk_in_vd(pos.0, *chunk_key, &terrain, presence.view_distance)
-                {
-                    if lazy_msg.is_none() {
-                        lazy_msg = Some(client.prepare(ServerGeneral::TerrainChunkUpdate {
-                            key: *chunk_key,
-                            chunk: Ok(match terrain.get_key(*chunk_key) {
-                                Some(chunk) => SerializedTerrainChunk::via_heuristic(&chunk),
-                                None => break 'chunk,
-                            }),
-                        }));
+                if let Some(participant) = &client.participant {
+                    let low_bandwidth = participant.bandwidth() < TERRAIN_LOW_BANDWIDTH;
+                    let lazy_msg = if low_bandwidth {
+                        &mut lazy_msg_lo
+                    } else {
+                        &mut lazy_msg_hi
+                    };
+                    if super::terrain::chunk_in_vd(
+                        pos.0,
+                        *chunk_key,
+                        &terrain,
+                        presence.view_distance,
+                    ) {
+                        if lazy_msg.is_none() {
+                            *lazy_msg = Some(client.prepare(ServerGeneral::TerrainChunkUpdate {
+                                key: *chunk_key,
+                                chunk: Ok(match terrain.get_key(*chunk_key) {
+                                    Some(chunk) => {
+                                        SerializedTerrainChunk::via_heuristic(&chunk, low_bandwidth)
+                                    },
+                                    None => break 'chunk,
+                                }),
+                            }));
+                        }
+                        lazy_msg.as_ref().map(|ref msg| client.send_prepared(&msg));
                     }
-                    lazy_msg.as_ref().map(|ref msg| client.send_prepared(&msg));
                 }
             }
         }
@@ -620,18 +620,26 @@ fn main() {
                 .unwrap();
             let quadpngquartwide_post = Instant::now();
 
-            let tripng_pre = Instant::now();
-            let tripng =
-                image_terrain_chonk(TriPngEncoding, TallPacking { flip_y: true }, &chunk)
+            let tripngaverage_pre = Instant::now();
+            let tripngaverage =
+                image_terrain_chonk(TriPngEncoding::<true>(), WidePacking::<true>(), &chunk)
                     .unwrap();
-            let tripng_post = Instant::now();
+            let tripngaverage_post = Instant::now();
+
+            let tripngconst_pre = Instant::now();
+            let tripngconst =
+                image_terrain_chonk(TriPngEncoding::<false>(), WidePacking::<true>(), &chunk)
+                    .unwrap();
+            let tripngconst_post = Instant::now();
 
             #[rustfmt::skip]
             sizes.extend_from_slice(&[
                 ("quadpngfull", quadpngfull.data.len() as f32 / n as f32),
                 ("quadpnghalf", quadpnghalf.data.len() as f32 / n as f32),
                 ("quadpngquarttall", quadpngquarttall.data.len() as f32 / n as f32),
                 ("quadpngquartwide", quadpngquartwide.data.len() as f32 / n as f32),
-                ("tripng", tripng.data.len() as f32 / n as f32),
+                ("tripngaverage", tripngaverage.data.len() as f32 / n as f32),
+                ("tripngconst", tripngconst.data.len() as f32 / n as f32),
             ]);
             let best_idx = sizes
                 .iter()
@@ -650,7 +658,8 @@ fn main() {
                 ("quadpnghalf", (quadpnghalf_post - quadpnghalf_pre).subsec_nanos()),
                 ("quadpngquarttall", (quadpngquarttall_post - quadpngquarttall_pre).subsec_nanos()),
                 ("quadpngquartwide", (quadpngquartwide_post - quadpngquartwide_pre).subsec_nanos()),
-                ("tripng", (tripng_post - tripng_pre).subsec_nanos()),
+                ("tripngaverage", (tripngaverage_post - tripngaverage_pre).subsec_nanos()),
+                ("tripngconst", (tripngconst_post - tripngconst_pre).subsec_nanos()),
             ]);
             {
                 let bucket = z_buckets
@@ -672,14 +681,23 @@ fn main() {
                 bucket.1 +=
                     (quadpngquartwide_post - quadpngquartwide_pre).subsec_nanos() as f32;
             }
-            if false {
+            if true {
                 let bucket = z_buckets
-                    .entry("tripng")
+                    .entry("tripngaverage")
                     .or_default()
                     .entry(chunk.get_max_z() - chunk.get_min_z())
                     .or_insert((0, 0.0));
                 bucket.0 += 1;
-                bucket.1 += (tripng_post - tripng_pre).subsec_nanos() as f32;
+                bucket.1 += (tripngaverage_post - tripngaverage_pre).subsec_nanos() as f32;
+            }
+            if true {
+                let bucket = z_buckets
+                    .entry("tripngconst")
+                    .or_default()
+                    .entry(chunk.get_max_z() - chunk.get_min_z())
+                    .or_insert((0, 0.0));
+                bucket.0 += 1;
+                bucket.1 += (tripngconst_post - tripngconst_pre).subsec_nanos() as f32;
             }
             trace!(
                 "{} {}: uncompressed: {}, {:?} {} {:?}",
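The benchmark above groups its measurements by the chunk's z extent (get_max_z() - get_min_z()), accumulating a sample count and total encode time per bucket so the two TriPng variants can be compared per terrain height. A small sketch of the same bucketing structure, with made-up numbers standing in for real measurements:

use std::collections::HashMap;

fn main() {
    // encoding name -> (z extent -> (samples, total nanoseconds))
    let mut z_buckets: HashMap<&str, HashMap<i32, (u32, f32)>> = HashMap::new();

    // Pretend we just timed one "tripngconst" encode of a 96-block-tall chunk.
    let (z_extent, elapsed_nanos) = (96, 1_250_000.0_f32);
    let bucket = z_buckets
        .entry("tripngconst")
        .or_default()
        .entry(z_extent)
        .or_insert((0, 0.0));
    bucket.0 += 1;
    bucket.1 += elapsed_nanos;

    for (name, buckets) in &z_buckets {
        for (extent, (count, total)) in buckets {
            println!("{name}: z extent {extent}: avg {} ns", total / *count as f32);
        }
    }
}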