mirror of https://gitlab.com/veloren/veloren.git
synced 2024-08-30 18:12:32 +00:00
Compress terrain chunks with deflate. Includes a benchmark showing that this makes them around 70% smaller, at the same speed as LZ4.
This commit is contained in:
parent b10718c568
commit 6d9de520f3
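As background for the size/speed claim in the commit message, here is a minimal, hypothetical sketch (not part of this commit) of how deflate and LZ4 can be compared on an arbitrary serialized-chunk buffer, using the same flate2 and lz-fear calls that appear in the diff below; the helper name is an assumption.

use std::io::Write;
use std::time::Instant;

// Hypothetical helper: compress `data` once with deflate (flate2, level 5)
// and once with LZ4 (lz-fear), printing compression ratios and wall time.
fn compare_codecs(data: &[u8]) {
    let t0 = Instant::now();
    let mut enc = flate2::write::DeflateEncoder::new(Vec::new(), flate2::Compression::new(5));
    enc.write_all(data).unwrap();
    let deflated = enc.finish().unwrap();
    let t1 = Instant::now();

    let mut lz4 = Vec::new();
    lz_fear::CompressionSettings::default()
        .compress(data, &mut lz4)
        .unwrap();
    let t2 = Instant::now();

    println!(
        "deflate: {:.3} of original in {:?}; lz4: {:.3} of original in {:?}",
        deflated.len() as f32 / data.len() as f32,
        t1 - t0,
        lz4.len() as f32 / data.len() as f32,
        t2 - t1,
    );
}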
CHANGELOG.md
@@ -43,6 +43,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Entities now have density
 - Buoyancy is calculated from the difference in density between an entity and surrounding fluid
 - Drag is now calculated based on physical properties
+- Terrain chunks are now deflate-compressed when sent over the network.
 
 ### Changed
 
Cargo.lock (generated, 17 lines changed)
@@ -1308,6 +1308,15 @@ dependencies = [
  "byteorder",
 ]
 
+[[package]]
+name = "deflate"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f95bf05dffba6e6cce8dfbb30def788154949ccd9aed761b472119c21e01c70"
+dependencies = [
+ "adler32",
+]
+
 [[package]]
 name = "derivative"
 version = "2.2.0"
@@ -3765,7 +3774,7 @@ checksum = "3c3287920cb847dee3de33d301c463fba14dda99db24214ddf93f83d3021f4c6"
 dependencies = [
  "bitflags",
  "crc32fast",
- "deflate",
+ "deflate 0.8.6",
  "miniz_oxide 0.3.7",
 ]
 
@@ -5454,6 +5463,7 @@ dependencies = [
  "approx 0.4.0",
  "arraygen",
  "assets_manager",
+ "bincode",
  "bitflags",
  "criterion",
  "crossbeam-channel",
@@ -5461,6 +5471,7 @@ dependencies = [
  "csv",
  "dot_vox",
  "enum-iterator",
+ "flate2",
  "hashbrown",
  "image",
  "indexmap",
@@ -5809,12 +5820,15 @@ dependencies = [
  "bincode",
  "bitvec",
  "criterion",
+ "deflate 0.9.1",
  "enum-iterator",
+ "flate2",
  "fxhash",
  "hashbrown",
  "image",
  "itertools 0.10.0",
  "lazy_static",
+ "lz-fear",
  "minifb",
  "noise",
  "num 0.4.0",
@@ -5831,6 +5845,7 @@ dependencies = [
  "tracing-subscriber",
  "vek",
  "veloren-common",
+ "veloren-common-frontend",
  "veloren-common-net",
 ]
 
client/src/lib.rs
@@ -1923,7 +1923,7 @@ impl Client {
     fn handle_server_terrain_msg(&mut self, msg: ServerGeneral) -> Result<(), Error> {
         match msg {
             ServerGeneral::TerrainChunkUpdate { key, chunk } => {
-                if let Ok(chunk) = chunk {
+                if let Some(chunk) = chunk.ok().and_then(|c| c.to_chunk()) {
                     self.state.insert_chunk(key, chunk);
                 }
                 self.pending_chunks.remove(&key);
common/Cargo.toml
@@ -9,8 +9,9 @@ no-assets = []
 tracy = ["common-base/tracy"]
 simd = ["vek/platform_intrinsics"]
 bin_csv = ["csv", "structopt"]
+compression = ["flate2"]
 
-default = ["simd"]
+default = ["simd", "compression"]
 
 [dependencies]
 
@@ -23,11 +24,13 @@ serde = { version = "1.0.110", features = ["derive", "rc"] }
 [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
 approx = "0.4.0"
 arraygen = "0.1.13"
+bincode = "1.3.3"
 crossbeam-utils = "0.8.1"
 bitflags = "1.2"
 crossbeam-channel = "0.5"
 enum-iterator = "0.6"
 lazy_static = "1.4.0"
+flate2 = { version = "1.0.20", optional = true }
 num-derive = "0.3"
 num-traits = "0.2"
 ordered-float = { version = "2.0.1", default-features = false }
common/net/src/msg/server.rs
@@ -6,7 +6,7 @@ use common::{
     outcome::Outcome,
     recipe::RecipeBook,
     resources::TimeOfDay,
-    terrain::{Block, TerrainChunk},
+    terrain::{Block, SerializedTerrainChunk},
     trade::{PendingTrade, SitePrices, TradeId, TradeResult},
     uid::Uid,
 };
@@ -106,7 +106,7 @@ pub enum ServerGeneral {
     // Ingame related AND terrain stream
     TerrainChunkUpdate {
         key: Vec2<i32>,
-        chunk: Result<Arc<TerrainChunk>, ()>,
+        chunk: Result<SerializedTerrainChunk, ()>,
     },
     TerrainBlockUpdates(HashMap<Vec3<i32>, Block>),
     // Always possible
common/src/terrain/mod.rs
@@ -17,6 +17,7 @@ pub use self::{
 };
 use roots::find_roots_cubic;
 use serde::{Deserialize, Serialize};
+use tracing::trace;
 
 use crate::{
     vol::{ReadVol, RectVolSize},
@@ -142,6 +143,56 @@ impl TerrainChunkMeta {
 pub type TerrainChunk = chonk::Chonk<Block, TerrainChunkSize, TerrainChunkMeta>;
 pub type TerrainGrid = VolGrid2d<TerrainChunk>;
 
+/// Wrapper for custom serialization strategies (e.g. compression) for terrain
+/// chunks
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct SerializedTerrainChunk(pub Vec<u8>);
+
+impl SerializedTerrainChunk {
+    pub fn from_chunk(chunk: &TerrainChunk) -> Self {
+        let uncompressed = bincode::serialize(chunk)
+            .expect("bincode serialization can only fail if a byte limit is set");
+        #[cfg(feature = "compression")]
+        {
+            use flate2::{write::DeflateEncoder, Compression};
+            use std::io::Write;
+            const EXPECT_MSG: &str =
+                "compression only fails for fallible Read/Write impls (which Vec<u8> is not)";
+
+            let mut encoder = DeflateEncoder::new(Vec::new(), Compression::new(5));
+            encoder.write_all(&*uncompressed).expect(EXPECT_MSG);
+            let compressed = encoder.finish().expect(EXPECT_MSG);
+            trace!(
+                "compressed {}, uncompressed {}, ratio {}",
+                compressed.len(),
+                uncompressed.len(),
+                compressed.len() as f32 / uncompressed.len() as f32
+            );
+            SerializedTerrainChunk(compressed)
+        }
+        #[cfg(not(feature = "compression"))]
+        {
+            SerializedTerrainChunk(uncompressed)
+        }
+    }
+
+    pub fn to_chunk(&self) -> Option<TerrainChunk> {
+        #[cfg(feature = "compression")]
+        {
+            use std::io::Read;
+            let mut uncompressed = Vec::new();
+            flate2::read::DeflateDecoder::new(&*self.0)
+                .read_to_end(&mut uncompressed)
+                .ok()?;
+            bincode::deserialize(&*uncompressed).ok()
+        }
+        #[cfg(not(feature = "compression"))]
+        {
+            bincode::deserialize(&self.0).ok()
+        }
+    }
+}
+
 impl TerrainGrid {
     /// Find a location suitable for spawning an entity near the given
     /// position (but in the same chunk).
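Taken together, from_chunk and to_chunk form a simple round-trip. A sketch of the intended usage (variable names hypothetical):

// Server side: serialize + deflate once.
let serialized = SerializedTerrainChunk::from_chunk(&chunk);
// Client side: inflate + deserialize. `None` means a corrupt or
// truncated payload, which the client code above simply drops.
if let Some(decoded) = serialized.to_chunk() {
    // insert into the terrain grid
}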
server/src/sys/msg/terrain.rs
@@ -2,7 +2,7 @@ use crate::{client::Client, metrics::NetworkRequestMetrics, presence::Presence};
 use common::{
     comp::Pos,
     event::{EventBus, ServerEvent},
-    terrain::{TerrainChunkSize, TerrainGrid},
+    terrain::{SerializedTerrainChunk, TerrainChunkSize, TerrainGrid},
     vol::RectVolSize,
 };
 use common_ecs::{Job, Origin, ParMode, Phase, System};
@@ -80,7 +80,7 @@ impl<'a> System<'a> for Sys {
                         network_metrics.chunks_served_from_memory.inc();
                         client.send(ServerGeneral::TerrainChunkUpdate {
                             key,
-                            chunk: Ok(Arc::clone(chunk)),
+                            chunk: Ok(SerializedTerrainChunk::from_chunk(&chunk)),
                         })?
                     },
                     None => {
server/src/sys/terrain.rs
@@ -10,7 +10,7 @@ use common::{
     event::{EventBus, ServerEvent},
     generation::get_npc_name,
     npc::NPC_NAMES,
-    terrain::TerrainGrid,
+    terrain::{SerializedTerrainChunk, TerrainGrid},
     LoadoutBuilder, SkillSetBuilder,
 };
 use common_ecs::{Job, Origin, Phase, System};
@@ -93,6 +93,28 @@ impl<'a> System<'a> for Sys {
             // Add to list of chunks to send to nearby players.
             new_chunks.push((key, Arc::clone(&chunk)));
 
+            // Send the chunk to all nearby players.
+            let mut lazy_msg = None;
+            for (presence, pos, client) in (&presences, &positions, &clients).join() {
+                let chunk_pos = terrain.pos_key(pos.0.map(|e| e as i32));
+                // Subtract 2 from the offset before computing squared magnitude:
+                // 1 since chunks need neighbors to be meshed,
+                // 1 to act as a buffer if the player moves in that direction
+                let adjusted_dist_sqr = (chunk_pos - key)
+                    .map(|e: i32| (e.abs() as u32).saturating_sub(2))
+                    .magnitude_squared();
+
+                if adjusted_dist_sqr <= presence.view_distance.pow(2) {
+                    if lazy_msg.is_none() {
+                        lazy_msg = Some(client.prepare(ServerGeneral::TerrainChunkUpdate {
+                            key,
+                            chunk: Ok(SerializedTerrainChunk::from_chunk(&*chunk)),
+                        }));
+                    }
+                    lazy_msg.as_ref().map(|ref msg| client.send_prepared(&msg));
+                }
+            }
+
             // TODO: code duplication for chunk insertion between here and state.rs
             // Insert the chunk into terrain changes
             if terrain.insert(key, chunk).is_some() {
server/src/sys/terrain_sync.rs
@@ -1,5 +1,8 @@
 use crate::{client::Client, presence::Presence};
-use common::{comp::Pos, terrain::TerrainGrid};
+use common::{
+    comp::Pos,
+    terrain::{SerializedTerrainChunk, TerrainGrid},
+};
 use common_ecs::{Job, Origin, Phase, System};
 use common_net::msg::ServerGeneral;
 use common_state::TerrainChanges;
@@ -38,8 +41,8 @@ impl<'a> System<'a> for Sys {
                 if lazy_msg.is_none() {
                     lazy_msg = Some(client.prepare(ServerGeneral::TerrainChunkUpdate {
                         key: *chunk_key,
-                        chunk: Ok(match terrain.get_key_arc(*chunk_key) {
-                            Some(chunk) => Arc::clone(chunk),
+                        chunk: Ok(match terrain.get_key(*chunk_key) {
+                            Some(chunk) => SerializedTerrainChunk::from_chunk(&chunk),
                             None => break 'chunk,
                         }),
                     }));
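Both server systems share the same lazy-prepare pattern: client.prepare serializes (and now deflates) the message at most once, and send_prepared re-sends those prepared bytes to every interested client. A schematic sketch of the pattern, with the loop and the interested_clients/make_terrain_msg names being hypothetical:

let mut lazy_msg = None;
for client in interested_clients {
    if lazy_msg.is_none() {
        // Pay the serialization + compression cost only once, and only
        // if at least one client actually needs the chunk.
        lazy_msg = Some(client.prepare(make_terrain_msg()));
    }
    lazy_msg.as_ref().map(|msg| client.send_prepared(msg));
}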
world/Cargo.toml
@@ -7,6 +7,7 @@ edition = "2018"
 [features]
 tracy = ["common/tracy", "common-net/tracy"]
 simd = ["vek/platform_intrinsics"]
+bin_compression = ["lz-fear", "deflate", "flate2", "common-frontend"]
 
 default = ["simd"]
 
@@ -37,6 +38,12 @@ ron = { version = "0.6", default-features = false }
 assets_manager = {version = "0.4.3", features = ["ron"]}
 #inline_tweak = "1.0.2"
 
+# compression benchmarks
+lz-fear = { version = "0.1.1", optional = true }
+deflate = { version = "0.9.1", optional = true }
+flate2 = { version = "1.0.20", optional = true }
+common-frontend = { package = "veloren-common-frontend", path = "../common/frontend", optional = true }
+
 
 [dev-dependencies]
 criterion = "0.3"
@@ -48,3 +55,7 @@ structopt = "0.3"
 [[bench]]
 harness = false
 name = "tree"
+
+[[bin]]
+name = "chunk_compression_benchmarks"
+required-features = ["bin_compression"]
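Given the [[bin]] target and its required-features above, the benchmark binary should be runnable with something like:

cargo run --release --bin chunk_compression_benchmarks --features bin_compression

(--release is an assumption here; the target itself only requires the bin_compression feature.)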
world/src/bin/chunk_compression_benchmarks.rs (new file, 246 lines)
use common::{
    terrain::{chonk::Chonk, Block, BlockKind, SpriteKind},
    vol::{IntoVolIterator, RectVolSize, SizedVol, WriteVol},
    volumes::dyna::{Access, ColumnAccess, Dyna},
};
use hashbrown::HashMap;
use std::{
    io::{Read, Write},
    time::Instant,
};
use tracing::{debug, trace};
use vek::*;
use veloren_world::{
    sim::{FileOpts, WorldOpts, DEFAULT_WORLD_MAP},
    World,
};

fn lz4_with_dictionary(data: &[u8], dictionary: &[u8]) -> Vec<u8> {
    let mut compressed = Vec::new();
    lz_fear::CompressionSettings::default()
        .dictionary(0, &dictionary)
        .compress(data, &mut compressed)
        .unwrap();
    compressed
}

#[allow(dead_code)]
fn unlz4_with_dictionary(data: &[u8], dictionary: &[u8]) -> Option<Vec<u8>> {
    lz_fear::LZ4FrameReader::new(data).ok().and_then(|r| {
        let mut uncompressed = Vec::new();
        r.into_read_with_dictionary(dictionary)
            .read_to_end(&mut uncompressed)
            .ok()?;
        bincode::deserialize(&*uncompressed).ok()
    })
}

#[allow(dead_code)]
fn do_deflate(data: &[u8]) -> Vec<u8> {
    use deflate::{write::DeflateEncoder, Compression};

    let mut encoder = DeflateEncoder::new(Vec::new(), Compression::Fast);
    encoder.write_all(data).expect("Write error!");
    let compressed_data = encoder.finish().expect("Failed to finish compression!");
    compressed_data
}

fn do_deflate_flate2(data: &[u8]) -> Vec<u8> {
    use flate2::{write::DeflateEncoder, Compression};

    let mut encoder = DeflateEncoder::new(Vec::new(), Compression::new(5));
    encoder.write_all(data).expect("Write error!");
    let compressed_data = encoder.finish().expect("Failed to finish compression!");
    compressed_data
}

fn chonk_to_dyna<V: Clone, S: RectVolSize, M: Clone, A: Access>(
    chonk: &Chonk<V, S, M>,
    block: V,
) -> Dyna<V, M, A> {
    let mut dyna = Dyna::<V, M, A>::filled(
        Vec3::new(
            S::RECT_SIZE.x,
            S::RECT_SIZE.y,
            (chonk.get_max_z() - chonk.get_min_z()) as u32,
        ),
        block,
        chonk.meta().clone(),
    );
    for (pos, block) in chonk.vol_iter(
        Vec3::new(0, 0, chonk.get_min_z()),
        Vec3::new(S::RECT_SIZE.x as _, S::RECT_SIZE.y as _, chonk.get_max_z()),
    ) {
        dyna.set(pos - chonk.get_min_z() * Vec3::unit_z(), block.clone())
            .expect("a bug here represents the arithmetic being wrong");
    }
    dyna
}

fn channelize_dyna<M: Clone, A: Access>(
    dyna: &Dyna<Block, M, A>,
) -> (
    Dyna<BlockKind, M, A>,
    Vec<u8>,
    Vec<u8>,
    Vec<u8>,
    Vec<SpriteKind>,
) {
    let mut blocks = Dyna::filled(dyna.sz, BlockKind::Air, dyna.metadata().clone());
    let (mut r, mut g, mut b, mut sprites) = (Vec::new(), Vec::new(), Vec::new(), Vec::new());
    for (pos, block) in dyna.vol_iter(dyna.lower_bound(), dyna.upper_bound()) {
        blocks.set(pos, **block).unwrap();
        match (block.get_color(), block.get_sprite()) {
            (Some(rgb), None) => {
                r.push(rgb.r);
                g.push(rgb.g);
                b.push(rgb.b);
            },
            (None, Some(spritekind)) => {
                sprites.push(spritekind);
            },
            _ => panic!(
                "attr being used for color vs sprite is mutually exclusive (and that's required \
                 for this translation to be lossless), but there's no way to guarantee that at \
                 the type level with Block's public API"
            ),
        }
    }
    (blocks, r, g, b, sprites)
}

fn histogram_to_dictionary(histogram: &HashMap<Vec<u8>, usize>, dictionary: &mut Vec<u8>) {
    let mut tmp: Vec<(Vec<u8>, usize)> = histogram.iter().map(|(k, v)| (k.clone(), *v)).collect();
    tmp.sort_by_key(|(_, count)| *count);
    debug!("{:?}", tmp.last());
    let mut i = 0;
    let mut j = tmp.len() - 1;
    while i < dictionary.len() && j > 0 {
        let (k, v) = &tmp[j];
        let dlen = dictionary.len();
        let n = (i + k.len()).min(dlen);
        dictionary[i..n].copy_from_slice(&k[0..k.len().min(dlen - i)]);
        debug!("{}: {}: {:?}", tmp.len() - j, v, k);
        j -= 1;
        i = n;
    }
}

fn main() {
    common_frontend::init_stdout(None);
    println!("Loading world");
    let (world, index) = World::generate(59686, WorldOpts {
        seed_elements: true,
        world_file: FileOpts::LoadAsset(DEFAULT_WORLD_MAP.into()),
        ..WorldOpts::default()
    });
    println!("Loaded world");
    let mut histogram: HashMap<Vec<u8>, usize> = HashMap::new();
    let mut histogram2: HashMap<Vec<u8>, usize> = HashMap::new();
    let mut dictionary = vec![0xffu8; 1 << 16];
    let mut dictionary2 = vec![0xffu8; 1 << 16];
    let k = 32;
    let sz = world.sim().get_size();
    let mut totals = [0.0; 5];
    let mut total_timings = [0.0; 2];
    let mut count = 0;
    for y in 1..sz.y {
        for x in 1..sz.x {
            let chunk =
                world.generate_chunk(index.as_index_ref(), Vec2::new(x as _, y as _), || false);
            if let Ok((chunk, _)) = chunk {
                let uncompressed = bincode::serialize(&chunk).unwrap();
                for w in uncompressed.windows(k) {
                    *histogram.entry(w.to_vec()).or_default() += 1;
                }
                if x % 128 == 0 {
                    histogram_to_dictionary(&histogram, &mut dictionary);
                }
                let lz4chonk_pre = Instant::now();
                let lz4_chonk = lz4_with_dictionary(&bincode::serialize(&chunk).unwrap(), &[]);
                let lz4chonk_post = Instant::now();
                //let lz4_dict_chonk = SerializedTerrainChunk::from_chunk(&chunk,
                // &*dictionary);

                let deflatechonk_pre = Instant::now();
                let deflate_chonk = do_deflate_flate2(&bincode::serialize(&chunk).unwrap());
                let deflatechonk_post = Instant::now();

                let dyna: Dyna<_, _, ColumnAccess> = chonk_to_dyna(&chunk, Block::empty());
                let ser_dyna = bincode::serialize(&dyna).unwrap();
                for w in ser_dyna.windows(k) {
                    *histogram2.entry(w.to_vec()).or_default() += 1;
                }
                if x % 128 == 0 {
                    histogram_to_dictionary(&histogram2, &mut dictionary2);
                }
                let lz4_dyna = lz4_with_dictionary(&*ser_dyna, &[]);
                //let lz4_dict_dyna = lz4_with_dictionary(&*ser_dyna, &dictionary2);
                let deflate_dyna = do_deflate(&*ser_dyna);
                let deflate_channeled_dyna =
                    do_deflate_flate2(&bincode::serialize(&channelize_dyna(&dyna)).unwrap());
                let n = uncompressed.len();
                let sizes = [
                    lz4_chonk.len() as f32 / n as f32,
                    deflate_chonk.len() as f32 / n as f32,
                    lz4_dyna.len() as f32 / n as f32,
                    deflate_dyna.len() as f32 / n as f32,
                    deflate_channeled_dyna.len() as f32 / n as f32,
                ];
                let i = sizes
                    .iter()
                    .enumerate()
                    .fold((1.0, 0), |(best, i), (j, ratio)| {
                        if ratio < &best {
                            (*ratio, j)
                        } else {
                            (best, i)
                        }
                    })
                    .1;
                let timings = [
                    (lz4chonk_post - lz4chonk_pre).subsec_nanos(),
                    (deflatechonk_post - deflatechonk_pre).subsec_nanos(),
                ];
                trace!(
                    "{} {}: uncompressed: {}, {:?} {} {:?}",
                    x,
                    y,
                    n,
                    sizes,
                    i,
                    timings
                );
                for i in 0..5 {
                    totals[i] += sizes[i];
                }
                for i in 0..2 {
                    total_timings[i] += timings[i] as f32;
                }
                count += 1;
            }
            if x % 64 == 0 {
                println!("Chunks processed: {}\n", count);
                println!("Average lz4_chonk: {}", totals[0] / count as f32);
                println!("Average deflate_chonk: {}", totals[1] / count as f32);
                println!("Average lz4_dyna: {}", totals[2] / count as f32);
                println!("Average deflate_dyna: {}", totals[3] / count as f32);
                println!(
                    "Average deflate_channeled_dyna: {}",
                    totals[4] / count as f32
                );
                println!("");
                println!(
                    "Average lz4_chonk nanos : {:02}",
                    total_timings[0] / count as f32
                );
                println!(
                    "Average deflate_chonk nanos: {:02}",
                    total_timings[1] / count as f32
                );
                println!("-----");
            }
        }
        histogram.clear();
    }
}