From 8ae2692b6eec9061d154176ea87ecdd15acc8b37 Mon Sep 17 00:00:00 2001 From: Joshua Yanovski Date: Wed, 16 Oct 2019 11:39:41 +0000 Subject: [PATCH] Allow canceling chunk generation. Currently we only do this when no players are in range of the chunk. We also send the first client who posted the chunk a message indicating that it's canceled, the hope being that this will be a performance win in single player mode since you don't have to wait three seconds to realize that the server won't generate the chunk for you. We now check an atomic flag for every column sample in a chunk. We could probably do this less frequently, but since it's a relaxed load it has essentially no performance impact on Intel architectures. --- Cargo.lock | 84 +- Cargo.toml | 3 + assets/voxygen/shaders/fluid-vert.glsl | 5 +- client/Cargo.toml | 2 + client/src/lib.rs | 27 +- common/src/msg/server.rs | 1 + server/src/cmd.rs | 34 +- server/src/lib.rs | 5 +- voxygen/Cargo.toml | 2 +- voxygen/src/hud/map.rs | 13 +- voxygen/src/hud/minimap.rs | 13 +- voxygen/src/hud/mod.rs | 14 +- voxygen/src/hud/settings_window.rs | 2 +- voxygen/src/session.rs | 3 +- world/Cargo.toml | 6 + world/examples/city.rs | 2 +- world/examples/turb.rs | 10 +- world/examples/water.rs | 87 ++ world/src/block/mod.rs | 37 +- world/src/column/mod.rs | 788 +++++++++++++--- world/src/config.rs | 41 +- world/src/lib.rs | 10 +- world/src/sim/erosion.rs | 1155 ++++++++++++++++++++++++ world/src/sim/mod.rs | 886 ++++++++++++++---- world/src/sim/util.rs | 150 ++- 25 files changed, 2953 insertions(+), 427 deletions(-) create mode 100644 world/examples/water.rs create mode 100644 world/src/sim/erosion.rs diff --git a/Cargo.lock b/Cargo.lock index c29c84b296..3b13351dcf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -263,6 +263,11 @@ name = "bitflags" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "bitvec" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "blake2b_simd" version = "0.5.8" @@ -485,19 +490,21 @@ dependencies = [ [[package]] name = "cocoa" -version = "0.14.0" +version = "0.18.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "block 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "core-graphics 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", + "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", + "core-graphics 0.17.3 (registry+https://github.com/rust-lang/crates.io-index)", + "foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "objc 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "cocoa" -version = "0.18.4" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -566,15 +573,6 @@ name = "constant_time_eq" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "core-foundation" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "core-foundation-sys 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = 
"core-foundation" version = "0.6.4" @@ -584,30 +582,11 @@ dependencies = [ "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "core-foundation-sys" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "core-foundation-sys" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "core-graphics" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "core-foundation 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "core-graphics" version = "0.17.3" @@ -1927,14 +1906,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "msgbox" -version = "0.2.0" -source = "git+https://github.com/bekker/msgbox-rs.git#d3e12e1cbfcd280bb4de5ad8032bded37d8e573f" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cocoa 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cocoa 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)", "gtk 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "objc 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2843,6 +2821,11 @@ dependencies = [ "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "roots" +version = "0.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "rouille" version = "3.0.0" @@ -3476,15 +3459,6 @@ dependencies = [ "percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "user32-sys" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "utf8-ranges" version = "1.0.4" @@ -3532,7 +3506,9 @@ dependencies = [ name = "veloren-client" version = "0.4.0" dependencies = [ + "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "hashbrown 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "image 0.22.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "specs 0.14.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3633,7 +3609,7 @@ dependencies = [ "image 0.22.2 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "msgbox 0.2.0 (git+https://github.com/bekker/msgbox-rs.git)", + "msgbox 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 
"num 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "rodio 0.9.0 (git+https://github.com/RustAudio/rodio?rev=e5474a2)", @@ -3656,13 +3632,19 @@ name = "veloren-world" version = "0.4.0" dependencies = [ "arr_macro 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "bitvec 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)", "hashbrown 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "minifb 0.13.0 (git+https://github.com/emoon/rust_minifb.git)", "noise 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "roots 0.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "vek 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)", "veloren-common 0.4.0", @@ -3888,6 +3870,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4" "checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5" "checksum bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8a606a02debe2813760609f57a64a2ffd27d9fdf5b2f133eaca0b248dd92cdd2" +"checksum bitvec 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "461d7d0e952343f575470daeb04d38aad19675b4f170e122c6b5dd618612c8a8" "checksum blake2b_simd 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5850aeee1552f495dd0250014cf64b82b7c8879a89d83b33bbdace2cc4f63182" "checksum block 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a" "checksum brotli-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4445dea95f4c2b41cde57cc9fee236ae4dbae88d8fcbdb4750fc1bb5d86aaecd" @@ -3915,8 +3898,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum claxon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f86c952727a495bda7abaf09bafdee1a939194dd793d9a8e26281df55ac43b00" "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum cmake 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "81fb25b677f8bf1eb325017cb6bb8452f87969db0fedb4f757b297bee78a7c62" -"checksum cocoa 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b0c23085dde1ef4429df6e5896b89356d35cdd321fb43afe3e378d010bb5adc6" "checksum cocoa 0.18.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cf79daa4e11e5def06e55306aa3601b87de6b5149671529318da048f67cdd77b" +"checksum cocoa 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"8cd20045e880893b4a8286d5639e9ade85fb1f6a14c291f882cf8cf2149d37d9" "checksum color_quant 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0dbbb57365263e881e805dc77d94697c9118fd94d8da011240555aa7b23445bd" "checksum conrod_core 0.63.0 (git+https://gitlab.com/veloren/conrod.git)" = "" "checksum conrod_derive 0.63.0 (git+https://gitlab.com/veloren/conrod.git)" = "" @@ -3924,11 +3907,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum const-random 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7b641a8c9867e341f3295564203b1c250eb8ce6cb6126e007941f78c4d2ed7fe" "checksum const-random-macro 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c750ec12b83377637110d5a57f5ae08e895b06c4b16e2bdbf1a94ef717428c59" "checksum constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "995a44c877f9212528ccc74b21a232f66ad69001e40ede5bcee2ac9ef2657120" -"checksum core-foundation 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "286e0b41c3a20da26536c6000a280585d519fd07b3956b43aed8a79e9edce980" "checksum core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d" -"checksum core-foundation-sys 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "716c271e8613ace48344f723b60b900a93150271e5be206212d052bbc0883efa" "checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" -"checksum core-graphics 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb0ed45fdc32f9ab426238fba9407dfead7bacd7900c9b4dd3f396f46eafdae3" "checksum core-graphics 0.17.3 (registry+https://github.com/rust-lang/crates.io-index)" = "56790968ab1c8a1202a102e6de05fc6e1ec87da99e4e93e9a7d13efbfc1e95a9" "checksum coreaudio-rs 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f229761965dad3e9b11081668a6ea00f1def7aa46062321b5ec245b834f6e491" "checksum coreaudio-sys 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7e8f5954c1c7ccb55340443e8b29fca24013545a5e7d72c1ca7db4fc02b982ce" @@ -4071,7 +4051,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum mio-extras 2.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "46e73a04c2fa6250b8d802134d56d554a9ec2922bf977777c805ea5def61ce40" "checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919" "checksum mopa 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a785740271256c230f57462d3b83e52f998433a7062fc18f96d5999474a9f915" -"checksum msgbox 0.2.0 (git+https://github.com/bekker/msgbox-rs.git)" = "" +"checksum msgbox 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "82cb63d8d7be875323a43d9ab525c28ce2d65bff89648d1aedd9962e00dede00" "checksum multipart 0.15.4 (registry+https://github.com/rust-lang/crates.io-index)" = "adba94490a79baf2d6a23eac897157047008272fa3eecb3373ae6377b91eca28" "checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88" "checksum nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6c722bee1037d430d0f8e687bbdbf222f27cc6e4e68d5caf630857bb2b6dbdce" @@ -4170,6 +4150,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum remove_dir_all 0.5.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum rodio 0.9.0 (git+https://github.com/RustAudio/rodio?rev=e5474a2)" = "" "checksum ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ece421e0c4129b90e4a35b6f625e472e96c552136f5093a2f4fa2bbb75a62d5" +"checksum roots 0.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e4c67c712ab62be58b24ab8960e2b95dd4ee00aac115c76f2709657821fe376d" "checksum rouille 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "112568052ec17fa26c6c11c40acbb30d3ad244bf3d6da0be181f5e7e42e5004f" "checksum rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ca4eaef519b494d1f2848fc602d18816fed808a981aedf4f1f00ceb7c9d32cf" "checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" @@ -4246,7 +4227,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" "checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" "checksum url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a" -"checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47" "checksum utf8-ranges 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b4ae116fef2b7fea257ed6440d3cfcff7f190865f170cdad00bb6465bf18ecba" "checksum uvth 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e59a167890d173eb0fcd7a1b99b84dc05c521ae8d76599130b8e19bef287abbf" "checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" diff --git a/Cargo.toml b/Cargo.toml index 26718f0286..e74b2ca445 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,8 +12,11 @@ members = [ [profile.dev] opt-level = 2 overflow-checks = false +panic = "abort" +# debug = false [profile.release] +# panic = "abort" debug = false codegen-units = 1 lto = true diff --git a/assets/voxygen/shaders/fluid-vert.glsl b/assets/voxygen/shaders/fluid-vert.glsl index f7582cca07..97381567e6 100644 --- a/assets/voxygen/shaders/fluid-vert.glsl +++ b/assets/voxygen/shaders/fluid-vert.glsl @@ -1,6 +1,7 @@ #version 330 core #include +#include in uint v_pos_norm; in uint v_col_light; @@ -35,11 +36,11 @@ void main() { // Use an array to avoid conditional branching f_norm = normals[norm_axis + norm_dir]; - f_col = vec3( + f_col = srgb_to_linear(vec3( float((v_col_light >> 8) & 0xFFu), float((v_col_light >> 16) & 0xFFu), float((v_col_light >> 24) & 0xFFu) - ) / 200.0; + ) / 255.0); f_light = float(v_col_light & 0xFFu) / 255.0; diff --git a/client/Cargo.toml b/client/Cargo.toml index 0a485cfc21..4c70c12b35 100644 --- a/client/Cargo.toml +++ b/client/Cargo.toml @@ -7,7 +7,9 @@ edition = "2018" [dependencies] common = { package = "veloren-common", path = "../common" } +byteorder = "1.3.2" uvth = "3.1.1" +image = "0.22.0" num_cpus = "1.10.1" log = "0.4.8" specs = "0.14.2" diff --git a/client/src/lib.rs b/client/src/lib.rs index b242c00c70..93ffa5ac72 100644 --- a/client/src/lib.rs +++ 
b/client/src/lib.rs @@ -20,6 +20,7 @@ use common::{ ChatType, }; use hashbrown::HashMap; +use image::DynamicImage; use log::warn; use std::{ net::SocketAddr, @@ -43,6 +44,7 @@ pub struct Client { client_state: ClientState, thread_pool: ThreadPool, pub server_info: ServerInfo, + pub world_map: Arc, postbox: PostBox, @@ -67,11 +69,12 @@ impl Client { let mut postbox = PostBox::to(addr)?; // Wait for initial sync - let (state, entity, server_info) = match postbox.next_message() { + let (state, entity, server_info, world_map) = match postbox.next_message() { Some(ServerMsg::InitialSync { ecs_state, entity_uid, server_info, + // world_map: /*(map_size, world_map)*/map_size, }) => { // TODO: Voxygen should display this. if server_info.git_hash != common::util::GIT_HASH.to_string() { @@ -87,7 +90,24 @@ impl Client { .ecs() .entity_from_uid(entity_uid) .ok_or(Error::ServerWentMad)?; - (state, entity, server_info) + + // assert_eq!(world_map.len(), map_size.x * map_size.y); + let map_size = Vec2::new(1024, 1024); + let world_map_raw = vec![0u8; 4 * /*world_map.len()*/map_size.x * map_size.y]; + // LittleEndian::write_u32_into(&world_map, &mut world_map_raw); + log::info!("Preparing image..."); + let world_map = Arc::new(image::DynamicImage::ImageRgba8({ + // Should not fail if the dimensions are correct. + let world_map = image::ImageBuffer::from_raw( + map_size.x as u32, + map_size.y as u32, + world_map_raw, + ); + world_map.ok_or(Error::Other("Server sent a bad world map image".into()))? + })); + log::info!("Done preparing image..."); + + (state, entity, server_info, world_map) } Some(ServerMsg::Error(ServerError::TooManyPlayers)) => { return Err(Error::TooManyPlayers) @@ -107,6 +127,7 @@ impl Client { client_state, thread_pool, server_info, + world_map, postbox, @@ -172,7 +193,7 @@ impl Client { } pub fn set_view_distance(&mut self, view_distance: u32) { - self.view_distance = Some(view_distance.max(1).min(25)); + self.view_distance = Some(view_distance.max(1).min(65)); self.postbox .send_message(ClientMsg::SetViewDistance(self.view_distance.unwrap())); // Can't fail diff --git a/common/src/msg/server.rs b/common/src/msg/server.rs index 50ef9081c5..b8b71b1ea2 100644 --- a/common/src/msg/server.rs +++ b/common/src/msg/server.rs @@ -28,6 +28,7 @@ pub enum ServerMsg { ecs_state: sphynx::StatePackage, entity_uid: u64, server_info: ServerInfo, + // world_map: Vec2, /*, Vec)*/ }, StateAnswer(Result), ForceState(ClientState), diff --git a/server/src/cmd.rs b/server/src/cmd.rs index 0c0fa4d502..02a648f981 100644 --- a/server/src/cmd.rs +++ b/server/src/cmd.rs @@ -10,10 +10,13 @@ use common::{ msg::ServerMsg, npc::{get_npc_name, NpcKind}, state::TimeOfDay, + terrain::TerrainChunkSize, + vol::RectVolSize, }; use rand::Rng; use specs::{Builder, Entity as EcsEntity, Join}; use vek::*; +use world::util::Sampler; use lazy_static::lazy_static; use scan_fmt::{scan_fmt, scan_fmt_some}; @@ -887,6 +890,7 @@ fn handle_tell(server: &mut Server, entity: EcsEntity, args: String, action: &Ch fn handle_debug_column(server: &mut Server, entity: EcsEntity, args: String, action: &ChatCommand) { let sim = server.world.sim(); + let sampler = server.world.sample_columns(); if let Ok((x, y)) = scan_fmt!(&args, action.arg_fmt, i32, i32) { let wpos = Vec2::new(x, y); /* let chunk_pos = wpos.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| { @@ -895,26 +899,48 @@ fn handle_debug_column(server: &mut Server, entity: EcsEntity, args: String, act let foo = || { // let sim_chunk = sim.get(chunk_pos)?; - let alt_base = 
sim.get_interpolated(wpos, |chunk| chunk.alt_base)?; let alt = sim.get_interpolated(wpos, |chunk| chunk.alt)?; + let water_alt = sim.get_interpolated(wpos, |chunk| chunk.water_alt)?; let chaos = sim.get_interpolated(wpos, |chunk| chunk.chaos)?; let temp = sim.get_interpolated(wpos, |chunk| chunk.temp)?; let humidity = sim.get_interpolated(wpos, |chunk| chunk.humidity)?; let rockiness = sim.get_interpolated(wpos, |chunk| chunk.rockiness)?; let tree_density = sim.get_interpolated(wpos, |chunk| chunk.tree_density)?; let spawn_rate = sim.get_interpolated(wpos, |chunk| chunk.spawn_rate)?; + let chunk_pos = wpos.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| e / sz as i32); + let chunk = sim.get(chunk_pos)?; + let col = sampler.get(wpos)?; + let downhill = chunk.downhill; + let river = &chunk.river; + let flux = chunk.flux; Some(format!( r#"wpos: {:?} -alt_base {:?} -alt {:?} +alt {:?} ({:?}) +water_alt {:?} ({:?}) +river {:?} +downhill {:?} chaos {:?} +flux {:?} temp {:?} humidity {:?} rockiness {:?} tree_density {:?} spawn_rate {:?} "#, - wpos, alt_base, alt, chaos, temp, humidity, rockiness, tree_density, spawn_rate + wpos, + alt, + col.alt, + water_alt, + col.water_level, + river, + downhill, + chaos, + flux, + temp, + humidity, + rockiness, + tree_density, + spawn_rate )) }; if let Some(s) = foo() { diff --git a/server/src/lib.rs b/server/src/lib.rs index 5415b504b5..88611395a3 100644 --- a/server/src/lib.rs +++ b/server/src/lib.rs @@ -98,7 +98,7 @@ impl Server { let mut state = State::default(); state .ecs_mut() - .add_resource(SpawnPoint(Vec3::new(16_384.0, 16_384.0, 190.0))); + .add_resource(SpawnPoint(Vec3::new(16_384.0, 16_384.0, 600.0))); state .ecs_mut() .add_resource(EventBus::::default()); @@ -867,11 +867,14 @@ impl Server { client.notify(ServerMsg::Error(ServerError::TooManyPlayers)); } else { // Return the state of the current world (all of the components that Sphynx tracks). + log::info!("Starting initial sync with client."); client.notify(ServerMsg::InitialSync { ecs_state: self.state.ecs().gen_state_package(), entity_uid: self.state.ecs().uid_from_entity(entity).unwrap().into(), // Can't fail. 
server_info: self.server_info.clone(), + // world_map: (WORLD_SIZE/*, self.world.sim().get_map()*/), }); + log::info!("Done initial sync with client."); frontend_events.push(Event::ClientConnected { entity }); } diff --git a/voxygen/Cargo.toml b/voxygen/Cargo.toml index 8264426b50..50e957e61b 100644 --- a/voxygen/Cargo.toml +++ b/voxygen/Cargo.toml @@ -50,7 +50,7 @@ serde_derive = "1.0.98" ron = "0.5.1" guillotiere = "0.4.2" simplelog = "0.6.0" -msgbox = { git = "https://github.com/bekker/msgbox-rs.git", optional = true } +msgbox = { version = "0.4.0", optional = true } directories = "2.0.2" num = "0.2.0" backtrace = "0.3.33" diff --git a/voxygen/src/hud/map.rs b/voxygen/src/hud/map.rs index bacc12e18b..936e2c2792 100644 --- a/voxygen/src/hud/map.rs +++ b/voxygen/src/hud/map.rs @@ -3,6 +3,7 @@ use client::{self, Client}; use common::comp; use conrod_core::{ color, + image::Id, widget::{self, Button, Image, Rectangle, Text}, widget_ids, Colorable, Positionable, Sizeable, Widget, WidgetCommon, }; @@ -30,16 +31,24 @@ pub struct Map<'a> { _show: &'a Show, client: &'a Client, + _world_map: Id, imgs: &'a Imgs, fonts: &'a Fonts, #[conrod(common_builder)] common: widget::CommonBuilder, } impl<'a> Map<'a> { - pub fn new(show: &'a Show, client: &'a Client, imgs: &'a Imgs, fonts: &'a Fonts) -> Self { + pub fn new( + show: &'a Show, + client: &'a Client, + imgs: &'a Imgs, + world_map: Id, + fonts: &'a Fonts, + ) -> Self { Self { _show: show, imgs, + _world_map: world_map, client, fonts: fonts, common: widget::CommonBuilder::default(), @@ -132,7 +141,7 @@ impl<'a> Widget for Map<'a> { .set(state.ids.location_name, ui), } // Map Image - Image::new(self.imgs.map_placeholder) + Image::new(/*self.world_map*/ self.imgs.map_placeholder) .middle_of(state.ids.map_bg) .w_h(700.0, 700.0) .parent(state.ids.map_bg) diff --git a/voxygen/src/hud/minimap.rs b/voxygen/src/hud/minimap.rs index b380ca551b..ba73702f83 100644 --- a/voxygen/src/hud/minimap.rs +++ b/voxygen/src/hud/minimap.rs @@ -3,6 +3,7 @@ use client::{self, Client}; use common::comp; use conrod_core::{ color, + image::Id, widget::{self, Button, Image, Rectangle, Text}, widget_ids, Color, Colorable, Positionable, Sizeable, Widget, WidgetCommon, }; @@ -29,17 +30,25 @@ pub struct MiniMap<'a> { client: &'a Client, imgs: &'a Imgs, + _world_map: Id, fonts: &'a Fonts, #[conrod(common_builder)] common: widget::CommonBuilder, } impl<'a> MiniMap<'a> { - pub fn new(show: &'a Show, client: &'a Client, imgs: &'a Imgs, fonts: &'a Fonts) -> Self { + pub fn new( + show: &'a Show, + client: &'a Client, + imgs: &'a Imgs, + world_map: Id, + fonts: &'a Fonts, + ) -> Self { Self { show, client, imgs, + _world_map: world_map, fonts: fonts, common: widget::CommonBuilder::default(), } @@ -88,7 +97,7 @@ impl<'a> Widget for MiniMap<'a> { .mid_top_with_margin_on(state.ids.mmap_frame, 13.0 * 2.0 + 2.0) .set(state.ids.mmap_frame_bg, ui); // Map Image - Image::new(self.imgs.map_placeholder) + Image::new(/*self.world_map*/ self.imgs.map_placeholder) .middle_of(state.ids.mmap_frame_bg) .w_h(92.0 * 2.0, 82.0 * 2.0) .parent(state.ids.mmap_frame_bg) diff --git a/voxygen/src/hud/mod.rs b/voxygen/src/hud/mod.rs index abe145dea7..4b02ecdb05 100644 --- a/voxygen/src/hud/mod.rs +++ b/voxygen/src/hud/mod.rs @@ -37,13 +37,14 @@ use crate::{ render::{AaMode, Consts, Globals, Renderer}, scene::camera::Camera, //settings::ControlSettings, - ui::{Ingameable, ScaleMode, Ui}, + ui::{Graphic, Ingameable, ScaleMode, Ui}, window::{Event as WinEvent, GameInput}, GlobalState, }; use client::{Client, Event 
as ClientEvent}; use common::{comp, terrain::TerrainChunk, vol::RectRasterableVol}; use conrod_core::{ + image::Id, text::cursor::Index, widget::{self, Button, Image, Rectangle, Text}, widget_ids, Color, Colorable, Labelable, Positionable, Sizeable, Widget, @@ -121,6 +122,7 @@ widget_ids! { // External chat, map, + world_map, character_window, minimap, bag, @@ -371,6 +373,7 @@ impl Show { pub struct Hud { ui: Ui, ids: Ids, + world_map: Id, imgs: Imgs, item_imgs: ItemImgs, fonts: Fonts, @@ -385,7 +388,7 @@ pub struct Hud { } impl Hud { - pub fn new(global_state: &mut GlobalState) -> Self { + pub fn new(global_state: &mut GlobalState, client: &Client) -> Self { let window = &mut global_state.window; let settings = &global_state.settings; @@ -393,6 +396,8 @@ impl Hud { ui.set_scaling_mode(settings.gameplay.ui_scale); // Generate ids. let ids = Ids::new(ui.id_generator()); + // Load world map + let world_map = ui.add_graphic(Graphic::Image(client.world_map.clone())); // Load images. let imgs = Imgs::load(&mut ui).expect("Failed to load images!"); // Load rotation images. @@ -405,6 +410,7 @@ impl Hud { Self { ui, imgs, + world_map, rot_imgs, item_imgs, fonts, @@ -737,7 +743,7 @@ impl Hud { } // MiniMap - match MiniMap::new(&self.show, client, &self.imgs, &self.fonts) + match MiniMap::new(&self.show, client, &self.imgs, self.world_map, &self.fonts) .set(self.ids.minimap, ui_widgets) { Some(minimap::Event::Toggle) => self.show.toggle_mini_map(), @@ -923,7 +929,7 @@ impl Hud { } // Map if self.show.map { - match Map::new(&self.show, client, &self.imgs, &self.fonts) + match Map::new(&self.show, client, &self.imgs, self.world_map, &self.fonts) .set(self.ids.map, ui_widgets) { Some(map::Event::Close) => { diff --git a/voxygen/src/hud/settings_window.rs b/voxygen/src/hud/settings_window.rs index 0584dbdc99..904d432d98 100644 --- a/voxygen/src/hud/settings_window.rs +++ b/voxygen/src/hud/settings_window.rs @@ -1139,7 +1139,7 @@ impl<'a> Widget for SettingsWindow<'a> { if let Some(new_val) = ImageSlider::discrete( self.global_state.settings.graphics.view_distance, 1, - 25, + 65, self.imgs.slider_indicator, self.imgs.slider, ) diff --git a/voxygen/src/session.rs b/voxygen/src/session.rs index 613c19085a..040912e180 100644 --- a/voxygen/src/session.rs +++ b/voxygen/src/session.rs @@ -38,12 +38,13 @@ impl SessionState { scene .camera_mut() .set_fov_deg(global_state.settings.graphics.fov); + let hud = Hud::new(global_state, &client.borrow()); Self { scene, client, key_state: KeyState::new(), controller: comp::Controller::default(), - hud: Hud::new(global_state), + hud, selected_block: Block::new(BlockKind::Normal, Rgb::broadcast(255)), } } diff --git a/world/Cargo.toml b/world/Cargo.toml index e356364917..f42ac3b417 100644 --- a/world/Cargo.toml +++ b/world/Cargo.toml @@ -6,13 +6,19 @@ edition = "2018" [dependencies] common = { package = "veloren-common", path = "../common" } +bitvec = "0.15.1" vek = "0.9.9" noise = "0.5.1" +num = "0.2.0" +ordered-float = "1.0" hashbrown = { version = "0.6.0", features = ["serde"] } lazy_static = "1.4.0" +log = "0.4.8" rand = "0.7.2" rand_chacha = "0.2.1" arr_macro = "0.1.2" +rayon = "1.1.0" +roots = "0.0.5" serde = "1.0.101" ron = "0.5.1" diff --git a/world/examples/city.rs b/world/examples/city.rs index 8774b3daf9..e98b379220 100644 --- a/world/examples/city.rs +++ b/world/examples/city.rs @@ -1,5 +1,5 @@ use rand::thread_rng; -use std::ops::{Add, Mul, Sub}; + use vek::*; use veloren_world::sim::Settlement; diff --git a/world/examples/turb.rs b/world/examples/turb.rs 
index 6fa2f94a4b..4f811e33b6 100644 --- a/world/examples/turb.rs +++ b/world/examples/turb.rs @@ -1,8 +1,6 @@ -use noise::{NoiseFn, Seedable, SuperSimplex}; -use rand::thread_rng; -use std::ops::{Add, Mul, Sub}; +use noise::{Seedable, SuperSimplex}; + use vek::*; -use veloren_world::sim::Settlement; const W: usize = 640; const H: usize = 640; @@ -10,8 +8,8 @@ const H: usize = 640; fn main() { let mut win = minifb::Window::new("Turb", W, H, minifb::WindowOptions::default()).unwrap(); - let nz_x = SuperSimplex::new().set_seed(0); - let nz_y = SuperSimplex::new().set_seed(1); + let _nz_x = SuperSimplex::new().set_seed(0); + let _nz_y = SuperSimplex::new().set_seed(1); let mut time = 0.0f64; while win.is_open() { diff --git a/world/examples/water.rs b/world/examples/water.rs new file mode 100644 index 0000000000..8c2c6b2ab0 --- /dev/null +++ b/world/examples/water.rs @@ -0,0 +1,87 @@ +use vek::*; +use veloren_world::{ + sim::{RiverKind, WORLD_SIZE}, + util::Sampler, + World, CONFIG, +}; + +const W: usize = 1024; +const H: usize = 1024; + +fn main() { + let world = World::generate(1337); + + let sampler = world.sim(); + + let mut win = + minifb::Window::new("World Viewer", W, H, minifb::WindowOptions::default()).unwrap(); + + let mut focus = Vec2::zero(); + let mut gain = 1.0; + let mut scale = (WORLD_SIZE.x / W) as i32; + + while win.is_open() { + let mut buf = vec![0; W * H]; + + for i in 0..W { + for j in 0..H { + let pos = focus + Vec2::new(i as i32, j as i32) * scale; + + let (alt, water_alt, river_kind) = sampler + .get(pos) + .map(|sample| (sample.alt, sample.water_alt, sample.river.river_kind)) + .unwrap_or((CONFIG.sea_level, CONFIG.sea_level, None)); + let alt = ((alt - CONFIG.sea_level) / CONFIG.mountain_scale) + .min(1.0) + .max(0.0); + let water_alt = ((alt.max(water_alt) - CONFIG.sea_level) / CONFIG.mountain_scale) + .min(1.0) + .max(0.0); + buf[j * W + i] = match river_kind { + Some(RiverKind::Ocean) => u32::from_le_bytes([64, 32, 0, 255]), + Some(RiverKind::Lake { .. }) => u32::from_le_bytes([ + 64 + (water_alt * 191.0) as u8, + 32 + (water_alt * 95.0) as u8, + 0, + 255, + ]), + Some(RiverKind::River { .. 
}) => u32::from_le_bytes([ + 64 + (alt * 191.0) as u8, + 32 + (alt * 95.0) as u8, + 0, + 255, + ]), + None => u32::from_le_bytes([0, (alt * 255.0) as u8, 0, 255]), + }; + } + } + + let spd = 32; + if win.is_key_down(minifb::Key::W) { + focus.y -= spd * scale; + } + if win.is_key_down(minifb::Key::A) { + focus.x -= spd * scale; + } + if win.is_key_down(minifb::Key::S) { + focus.y += spd * scale; + } + if win.is_key_down(minifb::Key::D) { + focus.x += spd * scale; + } + if win.is_key_down(minifb::Key::Q) { + gain += 10.0; + } + if win.is_key_down(minifb::Key::E) { + gain -= 10.0; + } + if win.is_key_down(minifb::Key::R) { + scale += 1; + } + if win.is_key_down(minifb::Key::F) { + scale = (scale - 1).max(0); + } + + win.update_with_buffer(&buf).unwrap(); + } +} diff --git a/world/src/block/mod.rs b/world/src/block/mod.rs index 66efd8103f..9559185177 100644 --- a/world/src/block/mod.rs +++ b/world/src/block/mod.rs @@ -150,8 +150,8 @@ impl<'a> BlockGen<'a> { let &ColumnSample { alt, chaos, - water_level: _, - //river, + water_level, + warp_factor, surface_color, sub_surface_color, //tree_density, @@ -179,7 +179,7 @@ impl<'a> BlockGen<'a> { let (_definitely_underground, height, on_cliff, water_height) = if (wposf.z as f32) < alt - 64.0 * chaos { // Shortcut warping - (true, alt, false, CONFIG.sea_level /*water_level*/) + (true, alt, false, water_level) } else { // Apply warping let warp = world @@ -189,6 +189,7 @@ impl<'a> BlockGen<'a> { .get(wposf.div(24.0)) .mul((chaos - 0.1).max(0.0).powf(2.0)) .mul(48.0); + let warp = Lerp::lerp(0.0, warp, warp_factor); let surface_height = alt + warp; @@ -221,7 +222,11 @@ impl<'a> BlockGen<'a> { false, height, on_cliff, - /*(water_level + warp).max(*/ CONFIG.sea_level, /*)*/ + (if water_level <= alt { + water_level + warp + } else { + water_level + }), ) }; @@ -371,7 +376,7 @@ impl<'a> BlockGen<'a> { .add(1.0) > 0.9993; - if cave { + if cave && wposf.z as f32 > water_height + 3.0 { None } else { Some(block) @@ -420,13 +425,15 @@ pub struct ZCache<'a> { impl<'a> ZCache<'a> { pub fn get_z_limits(&self, block_gen: &mut BlockGen) -> (f32, f32, f32) { - let cave_depth = if self.sample.cave_xy.abs() > 0.9 { - (self.sample.alt - self.sample.cave_alt + 8.0).max(0.0) - } else { - 0.0 - }; + let cave_depth = + if self.sample.cave_xy.abs() > 0.9 && self.sample.water_level <= self.sample.alt { + (self.sample.alt - self.sample.cave_alt + 8.0).max(0.0) + } else { + 0.0 + }; - let min = self.sample.alt - (self.sample.chaos * 48.0 + cave_depth) - 4.0; + let min = self.sample.alt - (self.sample.chaos * 48.0 + cave_depth); + let min = min - 4.0; let cliff = BlockGen::get_cliff_height( &mut block_gen.column_gen, @@ -462,9 +469,7 @@ impl<'a> ZCache<'a> { let ground_max = (self.sample.alt + warp + rocks).max(cliff) + 2.0; let min = min + structure_min; - let max = (ground_max + structure_max) - .max(self.sample.water_level) - .max(CONFIG.sea_level + 2.0); + let max = (ground_max + structure_max).max(self.sample.water_level + 2.0); // Structures let (min, max) = self @@ -479,9 +484,7 @@ impl<'a> ZCache<'a> { }) .unwrap_or((min, max)); - let structures_only_min_z = ground_max - .max(self.sample.water_level) - .max(CONFIG.sea_level + 2.0); + let structures_only_min_z = ground_max.max(self.sample.water_level + 2.0); (min, structures_only_min_z, max) } diff --git a/world/src/column/mod.rs b/world/src/column/mod.rs index 9f3e62d99f..0b9f687bea 100644 --- a/world/src/column/mod.rs +++ b/world/src/column/mod.rs @@ -2,7 +2,10 @@ use crate::{ all::ForestKind, block::StructureMeta, 
generator::{Generator, SpawnRules, TownGen}, - sim::{LocationInfo, SimChunk, WorldSim}, + sim::{ + local_cells, uniform_idx_as_vec2, vec2_as_uniform_idx, LocationInfo, RiverKind, SimChunk, + WorldSim, + }, util::{RandomPerm, Sampler, UnitChooser}, CONFIG, }; @@ -13,8 +16,10 @@ use common::{ }; use lazy_static::lazy_static; use noise::NoiseFn; +use roots::find_roots_cubic; use std::{ - f32, + cmp::Reverse, + f32, f64, ops::{Add, Div, Mul, Neg, Sub}, sync::Arc, }; @@ -76,14 +81,15 @@ impl<'a> ColumnGen<'a> { if seed % 5 == 2 && chunk.temp > CONFIG.desert_temp - && chunk.alt > CONFIG.sea_level + 5.0 + && chunk.alt > chunk.water_alt + 5.0 && chunk.chaos <= 0.35 { - Some(StructureData { + /*Some(StructureData { pos, seed, meta: Some(StructureMeta::Pyramid { height: 140 }), - }) + })*/ + None } else if seed % 17 == 2 && chunk.chaos < 0.2 { Some(StructureData { pos, @@ -118,6 +124,92 @@ impl<'a> ColumnGen<'a> { } } +fn river_spline_coeffs( + // _sim: &WorldSim, + chunk_pos: Vec2, + spline_derivative: Vec2, + downhill_pos: Vec2, +) -> Vec3> { + let dxy = downhill_pos - chunk_pos; + // Since all splines have been precomputed, we don't have to do that much work to evaluate the + // spline. The spline is just ax^2 + bx + c = 0, where + // + // a = dxy - chunk.river.spline_derivative + // b = chunk.river.spline_derivative + // c = chunk_pos + let spline_derivative = spline_derivative.map(|e| e as f64); + Vec3::new(dxy - spline_derivative, spline_derivative, chunk_pos) +} + +/// Find the nearest point from a quadratic spline to this point (in terms of t, the "distance along the curve" +/// by which our spline is parameterized). Note that if t < 0.0 or t >= 1.0, we probably shouldn't +/// be considered "on the curve"... hopefully this works out okay and gives us what we want (a +/// river that extends outwards tangent to a quadratic curve, with width configured by distance +/// along the line). +fn quadratic_nearest_point( + spline: &Vec3>, + point: Vec2, +) -> Option<(f64, Vec2, f64)> { + let a = spline.z.x; + let b = spline.y.x; + let c = spline.x.x; + let d = point.x; + let e = spline.z.y; + let f = spline.y.y; + let g = spline.x.y; + let h = point.y; + // This is equivalent to solving the following cubic equation (derivation is a bit annoying): + // + // A = 2(c^2 + g^2) + // B = 3(b * c + g * f) + // C = ((a - d) * 2 * c + b^2 + (e - h) * 2 * g + f^2) + // D = ((a - d) * b + (e - h) * f) + // + // Ax³ + Bx² + Cx + D = 0 + // + // Once solved, this yield up to three possible values for t (reflecting minimal and maximal + // values). We should choose the minimal such real value with t between 0.0 and 1.0. If we + // fall outside those bounds, then we are outside the spline and return None. 
+ let a_ = (c * c + g * g) * 2.0; + let b_ = (b * c + g * f) * 3.0; + let a_d = a - d; + let e_h = e - h; + let c_ = a_d * c * 2.0 + b * b + e_h * g * 2.0 + f * f; + let d_ = a_d * b + e_h * f; + let roots = find_roots_cubic(a_, b_, c_, d_); + let roots = roots.as_ref(); + + let min_root = roots + .into_iter() + .copied() + .filter_map(|root| { + let river_point = spline.x * root * root + spline.y * root + spline.z; + let river_zero = spline.z; + let river_one = spline.x + spline.y + spline.z; + if root > 0.0 && root < 1.0 { + Some((root, river_point)) + } else if river_point.distance_squared(river_zero) < 0.5 { + Some((root, /*river_point*/ river_zero)) + } else if river_point.distance_squared(river_one) < 0.5 { + Some((root, /*river_point*/ river_one)) + } else { + None + } + }) + .map(|(root, river_point)| { + let river_distance = river_point.distance_squared(point); + (root, river_point, river_distance) + }) + // In the (unlikely?) case that distances are equal, prefer the earliest point along the + // river. + .min_by(|&(ap, _, a), &(bp, _, b)| { + (a, ap < 0.0 || ap > 1.0, ap) + .partial_cmp(&(b, bp < 0.0 || bp > 1.0, bp)) + .unwrap() + }); + min_root +} + impl<'a> Sampler<'a> for ColumnGen<'a> { type Index = Vec2; type Sample = Option>; @@ -134,29 +226,328 @@ impl<'a> Sampler<'a> for ColumnGen<'a> { ) * 12.0; let wposf_turb = wposf + turb.map(|e| e as f64); - let alt_base = sim.get_interpolated(wpos, |chunk| chunk.alt_base)?; let chaos = sim.get_interpolated(wpos, |chunk| chunk.chaos)?; let temp = sim.get_interpolated(wpos, |chunk| chunk.temp)?; let humidity = sim.get_interpolated(wpos, |chunk| chunk.humidity)?; let rockiness = sim.get_interpolated(wpos, |chunk| chunk.rockiness)?; let tree_density = sim.get_interpolated(wpos, |chunk| chunk.tree_density)?; let spawn_rate = sim.get_interpolated(wpos, |chunk| chunk.spawn_rate)?; + let alt = sim.get_interpolated_monotone(wpos, |chunk| chunk.alt)?; let sim_chunk = sim.get(chunk_pos)?; + let neighbor_coef = TerrainChunkSize::RECT_SIZE.map(|e| e as f64); + let my_chunk_idx = vec2_as_uniform_idx(chunk_pos); + let neighbor_river_data = local_cells(my_chunk_idx).filter_map(|neighbor_idx: usize| { + let neighbor_pos = uniform_idx_as_vec2(neighbor_idx); + let neighbor_chunk = sim.get(neighbor_pos)?; + Some((neighbor_pos, neighbor_chunk, &neighbor_chunk.river)) + }); + let lake_width = (TerrainChunkSize::RECT_SIZE.x as f64 * (2.0f64.sqrt())) + 12.0; + let neighbor_river_data = neighbor_river_data.map(|(posj, chunkj, river)| { + let kind = match river.river_kind { + Some(kind) => kind, + None => { + return (posj, chunkj, river, None); + } + }; + let downhill_pos = if let Some(pos) = chunkj.downhill { + pos + } else { + match kind { + RiverKind::River { .. } => { + log::error!("What? River: {:?}, Pos: {:?}", river, posj); + panic!("How can a river have no downhill?"); + } + RiverKind::Lake { .. 
} => { + return (posj, chunkj, river, None); + } + RiverKind::Ocean => posj, + } + }; + let downhill_wpos = downhill_pos.map(|e| e as f64); + let downhill_pos = + downhill_pos.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| e / sz as i32); + let neighbor_pos = posj.map(|e| e as f64) * neighbor_coef; + let direction = neighbor_pos - downhill_wpos; + let river_width_min = if let RiverKind::River { cross_section } = kind { + cross_section.x as f64 + } else { + lake_width + }; + let downhill_chunk = sim.get(downhill_pos).expect("How can this not work?"); + let coeffs = + river_spline_coeffs(neighbor_pos, chunkj.river.spline_derivative, downhill_wpos); + let (direction, coeffs, downhill_chunk, river_t, river_pos, river_dist) = match kind { + RiverKind::River { .. } => { + if let Some((t, pt, dist)) = quadratic_nearest_point(&coeffs, wposf) { + (direction, coeffs, downhill_chunk, t, pt, dist.sqrt()) + } else { + let ndist = wposf.distance_squared(neighbor_pos); + let ddist = wposf.distance_squared(downhill_wpos); + let (closest_pos, closest_dist, closest_t) = if ndist <= ddist { + (neighbor_pos, ndist, 0.0) + } else { + (downhill_wpos, ddist, 1.0) + }; + ( + direction, + coeffs, + downhill_chunk, + closest_t, + closest_pos, + closest_dist.sqrt(), + ) + } + } + RiverKind::Lake { neighbor_pass_pos } => { + let pass_dist = neighbor_pass_pos + .map2( + neighbor_pos + .map2(TerrainChunkSize::RECT_SIZE, |f, g| (f as i32, g as i32)), + |e, (f, g)| ((e - f) / g).abs(), + ) + .reduce_partial_max(); + let spline_derivative = river.spline_derivative; + let neighbor_pass_pos = if pass_dist <= 1 { + neighbor_pass_pos + } else { + downhill_wpos.map(|e| e as i32) + }; + let pass_dist = neighbor_pass_pos + .map2( + neighbor_pos + .map2(TerrainChunkSize::RECT_SIZE, |f, g| (f as i32, g as i32)), + |e, (f, g)| ((e - f) / g).abs(), + ) + .reduce_partial_max(); + if pass_dist > 1 { + return (posj, chunkj, river, None); + } + let neighbor_pass_wpos = neighbor_pass_pos.map(|e| e as f64); + let neighbor_pass_pos = neighbor_pass_pos + .map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| e / sz as i32); + let coeffs = + river_spline_coeffs(neighbor_pos, spline_derivative, neighbor_pass_wpos); + let direction = neighbor_pos - neighbor_pass_wpos; + if let Some((t, pt, dist)) = quadratic_nearest_point(&coeffs, wposf) { + ( + direction, + coeffs, + sim.get(neighbor_pass_pos).expect("Must already work"), + t, + pt, + dist.sqrt(), + ) + } else { + let ndist = wposf.distance_squared(neighbor_pos); + /* let ddist = wposf.distance_squared(neighbor_pass_wpos); */ + let (closest_pos, closest_dist, closest_t) = /*if ndist <= ddist */ { + (neighbor_pos, ndist, 0.0) + } /* else { + (neighbor_pass_wpos, ddist, 1.0) + } */; + ( + direction, + coeffs, + sim.get(neighbor_pass_pos).expect("Must already work"), + closest_t, + closest_pos, + closest_dist.sqrt(), + ) + } + } + RiverKind::Ocean => { + let ndist = wposf.distance_squared(neighbor_pos); + let (closest_pos, closest_dist, closest_t) = (neighbor_pos, ndist, 0.0); + ( + direction, + coeffs, + sim.get(closest_pos.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| { + e as i32 / sz as i32 + })) + .expect("Must already work"), + closest_t, + closest_pos, + closest_dist.sqrt(), + ) + } + }; + let river_width_max = + if let Some(RiverKind::River { cross_section }) = downhill_chunk.river.river_kind { + cross_section.x as f64 + } else { + lake_width + }; + let river_width_noise = (sim.gen_ctx.small_nz.get((river_pos.div(16.0)).into_array())) + .max(-1.0) + .min(1.0) + .mul(0.5) + .sub(0.5) as f64; + 
let river_width = Lerp::lerp( + river_width_min, + river_width_max, + river_t.max(0.0).min(1.0).powf(0.5), + ); - // Never used - //const RIVER_PROPORTION: f32 = 0.025; + let river_width = river_width * (1.0 + river_width_noise * 0.3); + // To find the distance, we just evaluate the quadratic equation at river_t and see + // if it's within width (but we should be able to use it for a lot more, and this + // probably isn't the very best approach anyway since it will bleed out). + // let river_pos = coeffs.x * river_t * river_t + coeffs.y * river_t + coeffs.z; + let res = Vec2::new(0.0, (river_dist - (river_width * 0.5).max(1.0)).max(0.0)); + ( + posj, + chunkj, + river, + Some(( + direction, + res, + river_width, + (river_t, (river_pos, coeffs), downhill_chunk), + )), + ) + }); - /* - let river = dryness - .abs() - .neg() - .add(RIVER_PROPORTION) - .div(RIVER_PROPORTION) - .max(0.0) - .mul((1.0 - (chaos - 0.15) * 20.0).max(0.0).min(1.0)); - */ - let river = 0.0; + // Find the average distance to each neighboring body of water. + let mut river_count = 0.0f64; + let mut overlap_count = 0.0f64; + let mut river_distance_product = 1.0f64; + let mut river_overlap_distance_product = 0.0f64; + let mut max_river = None; + let mut max_key = None; + // IDEA: + // For every "nearby" chunk, check whether it is a river. If so, find the closest point on + // the river segment to wposf (if two point are equidistant, choose the earlier one), + // calling this point river_pos and the length (from 0 to 1) along the river segment for + // the nearby chunk river_t. Let river_dist be the distance from river_pos to wposf. + // + // Let river_alt be the interpolated river height at this point + // (from the alt/water altitude at the river, to the alt/water_altitude of the downhill + // river, increasing with river_t). + // + // Now, if river_dist is <= river_width * 0.5, then we don't care what altitude we use, and + // mark that we are on a river (we decide what river to use using a heuristic, and set the + // solely according to the computed river_alt for that point). + // + // Otherwise, we let dist = river_dist - river_width * 0.5. + // + // If dist >= TerrainChunkSize::RECT_SIZE.x, we don't include this river in the calculation + // of the correct altitude for this point. + // + // Otherwise (i.e. dist < TerrainChunkSize::RECT_SIZE.x), we want to bias the altitude of + // this point towards the altitude of the river. Specifically, as the dist goes from + // TerrainChunkSize::RECT_SIZE.x to 0, the weighted altitude of this point should go from + // alt to river_alt. + for (river_chunk_idx, river_chunk, river, dist) in neighbor_river_data { + match river.river_kind { + Some(kind) => { + if kind.is_river() && !dist.is_some() { + // Ostensibly near a river segment, but not "usefully" so (there is no + // closest point between t = 0.0 and t = 1.0). 
+ continue; + } else { + let river_dist = dist.map(|(_, dist, _, (river_t, _, downhill_river))| { + let downhill_height = if kind.is_river() { + Lerp::lerp( + river_chunk.alt.max(river_chunk.water_alt), + downhill_river.alt.max(downhill_river.water_alt), + river_t as f32, + ) as f64 + } else { + let neighbor_pos = + river_chunk_idx.map(|e| e as f64) * neighbor_coef; + if dist.y == 0.0 { + -(wposf - neighbor_pos).magnitude() + } else { + -(wposf - neighbor_pos).magnitude() + } + }; + (Reverse((dist.x, dist.y)), downhill_height) + }); + let river_dist = river_dist.or_else(|| { + if !kind.is_river() { + let neighbor_pos = + river_chunk_idx.map(|e| e as f64) * neighbor_coef; + let dist = (wposf - neighbor_pos).magnitude(); + let dist_upon = + (dist - TerrainChunkSize::RECT_SIZE.x as f64 * 0.5).max(0.0); + let dist_ = if dist == 0.0 { f64::INFINITY } else { -dist }; + Some((Reverse((0.0, dist_upon)), dist_)) + } else { + None + } + }); + let river_key = (river_dist, Reverse(kind)); + if max_key < Some(river_key) { + max_river = Some((river_chunk_idx, river_chunk, river, dist)); + max_key = Some(river_key); + } + } + + // NOTE: we scale by the distance to the river divided by the difference + // between the edge of the river that we intersect, and the remaining distance + // until the nearest point in "this" chunk (i.e. the one whose top-left corner + // is chunk_pos) that is at least 2 chunks away from the river source. + if let Some((_, dist, _, (river_t, _, downhill_river_chunk))) = dist { + let max_distance = if !river.is_river() { + /*(*/ + TerrainChunkSize::RECT_SIZE.x as f64 /* * (1.0 - (2.0f64.sqrt() / 2.0))) + 4.0*/ - lake_width * 0.5 + } else { + TerrainChunkSize::RECT_SIZE.x as f64 + }; + let scale_factor = max_distance; + let river_dist = dist.y; + + if !(dist.x == 0.0 && river_dist < scale_factor) { + continue; + } + // We basically want to project outwards from river_pos, along the current + // tangent line, to chunks <= river_width * 1.0 away from this + // point. We *don't* want to deal with closer chunks because they + + // NOTE: river_width <= 2 * max terrain chunk size width, so this should not + // lead to division by zero. + // NOTE: If distance = 0.0 this goes to zero, which is desired since it + // means points that actually intersect with rivers will not be interpolated + // with the "normal" height of this point. + // NOTE: We keep the maximum at 1.0 so we don't undo work from another river + // just by being far away. 
+ let river_scale = river_dist / scale_factor; + let river_alt = + Lerp::lerp(river_chunk.alt, downhill_river_chunk.alt, river_t as f32); + let river_alt = Lerp::lerp(river_alt, alt, river_scale as f32); + let river_alt_diff = river_alt - alt; + let river_alt_inv = river_alt_diff as f64; + river_overlap_distance_product += (1.0 - river_scale) * river_alt_inv; + overlap_count += 1.0 - river_scale; + river_count += 1.0; + river_distance_product *= river_scale; + } + } + None => {} + } + } + let river_scale_factor = if river_count == 0.0 { + 1.0 + } else { + let river_scale_factor = river_distance_product; + if river_scale_factor == 0.0 { + 0.0 + } else { + river_scale_factor.powf(if river_count == 0.0 { + 1.0 + } else { + 1.0 / river_count + }) + } + }; + + let alt_for_river = alt + + if overlap_count == 0.0 { + 0.0 + } else { + river_overlap_distance_product / overlap_count + } as f32; let cliff_hill = (sim .gen_ctx @@ -164,14 +555,13 @@ impl<'a> Sampler<'a> for ColumnGen<'a> { .get((wposf_turb.div(128.0)).into_array()) as f32) .mul(24.0); - let riverless_alt = sim.get_interpolated(wpos, |chunk| chunk.alt)? - + (sim - .gen_ctx - .small_nz - .get((wposf_turb.div(200.0)).into_array()) as f32) - .abs() - .mul(chaos.max(0.05)) - .mul(55.0) + let riverless_alt_delta = (sim + .gen_ctx + .small_nz + .get((wposf_turb.div(200.0)).into_array()) as f32) + .abs() + .mul(chaos.max(0.05)) + .mul(27.0) + (sim .gen_ctx .small_nz @@ -179,20 +569,239 @@ impl<'a> Sampler<'a> for ColumnGen<'a> { .abs() .mul((1.0 - chaos).max(0.3)) .mul(1.0 - humidity) - .mul(65.0); + .mul(32.0); + + let downhill = sim_chunk.downhill; + let downhill_pos = downhill.and_then(|downhill_pos| sim.get(downhill_pos)); + debug_assert!(sim_chunk.water_alt >= CONFIG.sea_level); + + let downhill_water_alt = downhill_pos + .map(|downhill_chunk| { + downhill_chunk + .water_alt + .min(sim_chunk.water_alt) + .max(sim_chunk.alt.min(sim_chunk.water_alt)) + }) + .unwrap_or(CONFIG.sea_level); let is_cliffs = sim_chunk.is_cliffs; let near_cliffs = sim_chunk.near_cliffs; - let alt = riverless_alt - - (1.0 - river) - .mul(f32::consts::PI) - .cos() - .add(1.0) - .mul(0.5) - .mul(24.0); + let river_gouge = 0.5; + let (in_water, alt_, water_level, warp_factor) = if let Some(( + max_border_river_pos, + river_chunk, + max_border_river, + max_border_river_dist, + )) = max_river + { + // This is flowing into a lake, or a lake, or is at least a non-ocean tile. + // + // If we are <= water_alt, we are in the lake; otherwise, we are flowing into it. 
+ let (in_water, new_alt, new_water_alt, warp_factor) = max_border_river + .river_kind + .and_then(|river_kind| { + if let RiverKind::River { cross_section } = river_kind { + if max_border_river_dist.map(|(_, dist, _, _)| dist) != Some(Vec2::zero()) { + return None; + } + let (_, _, river_width, (river_t, (river_pos, _), downhill_river_chunk)) = + max_border_river_dist.unwrap(); + let river_alt = Lerp::lerp( + river_chunk.alt.max(river_chunk.water_alt), + downhill_river_chunk.alt.max(downhill_river_chunk.water_alt), + river_t as f32, + ); + let new_alt = river_alt - river_gouge; + let river_dist = wposf.distance(river_pos); + let river_height_factor = river_dist / (river_width * 0.5); - let water_level = riverless_alt - 4.0 - 5.0 * chaos; + Some(( + true, + Lerp::lerp( + new_alt - cross_section.y.max(1.0), + new_alt - 1.0, + (river_height_factor * river_height_factor) as f32, + ), + new_alt, + 0.0, + )) + } else { + None + } + }) + .unwrap_or_else(|| { + max_border_river + .river_kind + .and_then(|river_kind| { + match river_kind { + RiverKind::Ocean => { + let ( + _, + dist, + river_width, + (river_t, (river_pos, _), downhill_river_chunk), + ) = if let Some(dist) = max_border_river_dist { + dist + } else { + log::error!( + "Ocean: {:?} Here: {:?}, Ocean: {:?}", + max_border_river, + chunk_pos, + max_border_river_pos + ); + panic!( + "Oceans should definitely have a downhill! ...Right?" + ); + }; + let lake_water_alt = Lerp::lerp( + river_chunk.alt.max(river_chunk.water_alt), + downhill_river_chunk + .alt + .max(downhill_river_chunk.water_alt), + river_t as f32, + ); + + if dist == Vec2::zero() { + let river_dist = wposf.distance(river_pos); + let _river_height_factor = river_dist / (river_width * 0.5); + return Some(( + true, + alt_for_river.min(lake_water_alt - 1.0 - river_gouge), + lake_water_alt - river_gouge, + 0.0, + )); + } + + Some(( + river_scale_factor <= 1.0, + alt_for_river, + downhill_water_alt, + river_scale_factor as f32, + )) + } + RiverKind::Lake { .. 
} => { + let lake_dist = (max_border_river_pos.map(|e| e as f64) + * neighbor_coef) + .distance(wposf); + let downhill_river_chunk = max_border_river_pos; + let lake_id_dist = downhill_river_chunk - chunk_pos; + let in_bounds = lake_id_dist.x >= -1 + && lake_id_dist.y >= -1 + && lake_id_dist.x <= 1 + && lake_id_dist.y <= 1; + let in_bounds = + in_bounds && (lake_id_dist.x >= 0 && lake_id_dist.y >= 0); + let (_, dist, _, (river_t, _, downhill_river_chunk)) = + if let Some(dist) = max_border_river_dist { + dist + } else { + if lake_dist + <= TerrainChunkSize::RECT_SIZE.x as f64 * 1.0 + || in_bounds + { + let gouge_factor = 0.0; + return Some(( + in_bounds + || downhill_water_alt + .max(river_chunk.water_alt) + > alt_for_river, + alt_for_river, + (downhill_water_alt.max(river_chunk.water_alt) + - river_gouge), + river_scale_factor as f32 + * (1.0 - gouge_factor), + )); + } else { + return Some(( + false, + alt_for_river, + downhill_water_alt, + river_scale_factor as f32, + )); + } + }; + + let lake_dist = dist.y; + let lake_water_alt = Lerp::lerp( + river_chunk.alt.max(river_chunk.water_alt), + downhill_river_chunk + .alt + .max(downhill_river_chunk.water_alt), + river_t as f32, + ); + if dist == Vec2::zero() { + return Some(( + true, + alt_for_river.min(lake_water_alt - 1.0 - river_gouge), + lake_water_alt - river_gouge, + 0.0, + )); + } + if lake_dist <= TerrainChunkSize::RECT_SIZE.x as f64 * 1.0 + || in_bounds + { + let gouge_factor = if in_bounds && lake_dist <= 1.0 { + 1.0 + } else { + 0.0 + }; + let in_bounds_ = + lake_dist <= TerrainChunkSize::RECT_SIZE.x as f64 * 0.5; + if gouge_factor == 1.0 { + return Some(( + alt_for_river < lake_water_alt || in_bounds, + alt.min(lake_water_alt - 1.0 - river_gouge), + downhill_water_alt.max(lake_water_alt) + - river_gouge, + 0.0, + )); + } else { + return Some(( + alt_for_river < lake_water_alt || in_bounds, + alt_for_river, + if in_bounds_ { + downhill_water_alt.max(lake_water_alt) + - river_gouge + } else { + downhill_water_alt - river_gouge + }, + river_scale_factor as f32 * (1.0 - gouge_factor), + )); + } + } + Some(( + river_scale_factor <= 1.0, + alt_for_river, + downhill_water_alt, + river_scale_factor as f32, + )) + } + RiverKind::River { .. } => { + // FIXME: Make water altitude accurate. 
+ Some(( + river_scale_factor <= 1.0, + alt_for_river, + downhill_water_alt, + river_scale_factor as f32, + )) + } + } + }) + .unwrap_or(( + false, + alt_for_river, + downhill_water_alt, + river_scale_factor as f32, + )) + }); + (in_water, new_alt, new_water_alt, warp_factor) + } else { + (false, alt_for_river, downhill_water_alt, 1.0) + }; + + let riverless_alt_delta = Lerp::lerp(0.0, riverless_alt_delta, warp_factor); + let alt = alt_ + riverless_alt_delta; let rock = (sim.gen_ctx.small_nz.get( Vec3::new(wposf.x, wposf.y, alt as f64) @@ -380,68 +989,6 @@ impl<'a> Sampler<'a> for ColumnGen<'a> { .add((marble_small - 0.5) * 0.5), ); - /* - // Work out if we're on a path or near a town - let dist_to_path = match &sim_chunk.location { - Some(loc) => { - let this_loc = &sim.locations[loc.loc_idx]; - this_loc - .neighbours - .iter() - .map(|j| { - let other_loc = &sim.locations[*j]; - - // Find the two location centers - let near_0 = this_loc.center.map(|e| e as f32); - let near_1 = other_loc.center.map(|e| e as f32); - - // Calculate distance to path between them - (0.0 + (near_1.y - near_0.y) * wposf_turb.x as f32 - - (near_1.x - near_0.x) * wposf_turb.y as f32 - + near_1.x * near_0.y - - near_0.x * near_1.y) - .abs() - .div(near_0.distance(near_1)) - }) - .filter(|x| x.is_finite()) - .min_by(|a, b| a.partial_cmp(b).unwrap()) - .unwrap_or(f32::INFINITY) - } - None => f32::INFINITY, - }; - - let on_path = dist_to_path < 5.0 && !sim_chunk.near_cliffs; // || near_0.distance(wposf_turb.map(|e| e as f32)) < 150.0; - - let (alt, ground) = if on_path { - (alt - 1.0, dirt) - } else { - (alt, ground) - }; - */ - - // Cities - // TODO: In a later MR - /* - let building = match &sim_chunk.location { - Some(loc) => { - let loc = &sim.locations[loc.loc_idx]; - let rpos = wposf.map2(loc.center, |a, b| a as f32 - b as f32) / 256.0 + 0.5; - - if rpos.map(|e| e >= 0.0 && e < 1.0).reduce_and() { - (loc.settlement - .get_at(rpos) - .map(|b| b.seed % 20 + 10) - .unwrap_or(0)) as f32 - } else { - 0.0 - } - } - None => 0.0, - }; - - let alt = alt + building; - */ - // Caves let cave_at = |wposf: Vec2| { (sim.gen_ctx.cave_0_nz.get( @@ -470,34 +1017,33 @@ impl<'a> Sampler<'a> for ColumnGen<'a> { .powf(15.0) .mul(150.0); + let near_ocean = max_river.and_then(|(_, _, river_data, _)| { + if (river_data.is_lake() || river_data.river_kind == Some(RiverKind::Ocean)) + && ((alt <= water_level.max(CONFIG.sea_level + 5.0) && !is_cliffs) || !near_cliffs) + { + Some(water_level) + } else { + None + } + }); + + let ocean_level = if let Some(_sea_level) = near_ocean { + alt - CONFIG.sea_level + } else { + 5.0 + }; + Some(ColumnSample { alt, chaos, water_level, - river, + warp_factor, surface_color: Rgb::lerp( sand, // Land - Rgb::lerp( - ground, - // Mountain - Rgb::lerp( - cliff, - snow, - (alt - CONFIG.sea_level - - 0.4 * CONFIG.mountain_scale - - alt_base - - temp * 96.0 - - marble * 24.0) - / 12.0, - ), - (alt - CONFIG.sea_level - 0.25 * CONFIG.mountain_scale + marble * 128.0) - / (0.25 * CONFIG.mountain_scale), - ), + ground, // Beach - ((alt - CONFIG.sea_level - 1.0) / 2.0) - .min(1.0 - river * 2.0) - .max(0.0), + ((ocean_level - 1.0) / 2.0).max(0.0), ), sub_surface_color: dirt, tree_density, @@ -523,7 +1069,11 @@ impl<'a> Sampler<'a> for ColumnGen<'a> { .town .as_ref() .map(|town| TownGen.spawn_rules(town, wpos)) - .unwrap_or(SpawnRules::default()), + .unwrap_or(SpawnRules::default()) + .and(SpawnRules { + cliffs: !in_water, + trees: true, + }), }) } } @@ -533,7 +1083,7 @@ pub struct ColumnSample<'a> { pub alt: f32, pub 
chaos: f32, pub water_level: f32, - pub river: f32, + pub warp_factor: f32, pub surface_color: Rgb, pub sub_surface_color: Rgb, pub tree_density: f32, diff --git a/world/src/config.rs b/world/src/config.rs index 22a488a7c3..1e5a372409 100644 --- a/world/src/config.rs +++ b/world/src/config.rs @@ -7,15 +7,54 @@ pub struct Config { pub desert_hum: f32, pub forest_hum: f32, pub jungle_hum: f32, + /// Rainfall (in meters) per chunk per minute. Default is set to make it approximately + /// 1 m rainfall / year uniformly across the whole land area, which is the average rainfall + /// on Earth. + pub rainfall_chunk_rate: f32, + /// Roughness coefficient is an empirical value that controls the rate of energy loss of water + /// in a river. The higher it is, the more water slows down as it flows downhill, which + /// consequently leads to lower velocities and higher river area for the same flow rate. + /// + /// See https://wwwrcamnl.wr.usgs.gov/sws/fieldmethods/Indirects/nvalues/index.htm. + /// + /// The default is set to over 0.06, which is pretty high but still within a reasonable range for + /// rivers. The higher this is, the quicker rivers appear, and since we often will have high + /// slopes we want to give rivers as much of a chance as possible. In the future we can set + /// this dynamically. + /// + /// NOTE: The values in the link are in seconds / (m^(-1/3)), but we use them without + /// conversion as though they are in minutes / (m^(-1/3)). The idea here is that our clock + /// speed has time go by at approximately 1 minute per second, but since velocity depends on + /// this parameter, we want flow rates to still "look" natural at the second level. The way we + /// are cheating is that we still allow the refill rate (via rainfall) of rivers and lakes to + /// be specified as though minutes are *really* minutes. This reduces the amount of water + /// needed to form a river of a given area by 60, but hopefully this should not feel too + /// unnatural since the refill rate is still below what people should be able to perceive. + pub river_roughness: f32, + /// Maximum width of rivers, in terms of a multiple of the horizontal chunk size. + /// + /// Currently, not known whether setting this above 1.0 will work properly. Please use with + /// care! + pub river_max_width: f32, + /// Minimum height at which rivers display. + pub river_min_height: f32, + /// Rough desired river width-to-depth ratio (in terms of horizontal chunk width / m, for some + /// reason). Not exact. 
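// A quick check of the rainfall_chunk_rate default documented above, read as a volume
// (depth times chunk area) per chunk per minute. The 32 m chunk edge is an assumption
// standing in for TerrainChunkSize::RECT_SIZE; with it, 1 m of rain per year works out to
// 32 * 32 / (365 * 24 * 60) ≈ 0.00195 m³ per chunk per minute, i.e. roughly 1 / 512.
fn rainfall_chunk_rate_for(metres_per_year: f64, chunk_edge_m: f64) -> f64 {
    let minutes_per_year = 365.0 * 24.0 * 60.0;
    chunk_edge_m * chunk_edge_m * metres_per_year / minutes_per_year
}
// rainfall_chunk_rate_for(1.0, 32.0) ≈ 0.00195 ≈ 1.0 / 512.0, matching the default below.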
+ pub river_width_to_depth: f32, } pub const CONFIG: Config = Config { sea_level: 140.0, - mountain_scale: 1000.0, + mountain_scale: 2048.0, snow_temp: -0.6, tropical_temp: 0.2, desert_temp: 0.6, desert_hum: 0.15, forest_hum: 0.5, jungle_hum: 0.85, + rainfall_chunk_rate: 1.0 / 512.0, + river_roughness: 0.06125, + river_max_width: 2.0, + river_min_height: 0.25, + river_width_to_depth: 1.0, }; diff --git a/world/src/lib.rs b/world/src/lib.rs index 4773a8397e..233d77f06a 100644 --- a/world/src/lib.rs +++ b/world/src/lib.rs @@ -69,16 +69,18 @@ impl World { let stone = Block::new(BlockKind::Dense, Rgb::new(200, 220, 255)); let water = Block::new(BlockKind::Water, Rgb::new(60, 90, 190)); - let chunk_size2d = TerrainChunkSize::RECT_SIZE; + let _chunk_size2d = TerrainChunkSize::RECT_SIZE; let (base_z, sim_chunk) = match self .sim - .get_interpolated( + /*.get_interpolated( chunk_pos.map2(chunk_size2d, |e, sz: u32| e * sz as i32 + sz as i32 / 2), |chunk| chunk.get_base_z(), ) - .and_then(|base_z| self.sim.get(chunk_pos).map(|sim_chunk| (base_z, sim_chunk))) + .and_then(|base_z| self.sim.get(chunk_pos).map(|sim_chunk| (base_z, sim_chunk))) */ + .get_base_z(chunk_pos) { - Some((base_z, sim_chunk)) => (base_z as i32, sim_chunk), + Some(base_z) => (base_z as i32, self.sim.get(chunk_pos).unwrap()), + // Some((base_z, sim_chunk)) => (base_z as i32, sim_chunk), None => { return Ok(( TerrainChunk::new( diff --git a/world/src/sim/erosion.rs b/world/src/sim/erosion.rs new file mode 100644 index 0000000000..06420b722a --- /dev/null +++ b/world/src/sim/erosion.rs @@ -0,0 +1,1155 @@ +use super::{downhill, neighbors, uniform_idx_as_vec2, uphill, WORLD_SIZE}; +use crate::{config::CONFIG, util::RandomField}; +use common::{terrain::TerrainChunkSize, vol::RectVolSize}; +use noise::{NoiseFn, Point3}; +use num::Float; +use ordered_float::NotNan; +use rayon::prelude::*; +use std::{ + cmp::{Ordering, Reverse}, + collections::BinaryHeap, + f32, f64, mem, u32, +}; +use vek::*; + +/// Compute the water flux at all chunks, given a list of chunk indices sorted by increasing +/// height. +pub fn get_drainage(newh: &[u32], downhill: &[isize], _boundary_len: usize) -> Box<[f32]> { + // FIXME: Make the below work. For now, we just use constant flux. + // Initially, flux is determined by rainfall. We currently treat this as the same as humidity, + // so we just use humidity as a proxy. The total flux across the whole map is normalize to + // 1.0, and we expect the average flux to be 0.5. To figure out how far from normal any given + // chunk is, we use its logit. + // NOTE: If there are no non-boundary chunks, we just set base_flux to 1.0; this should still + // work fine because in that case there's no erosion anyway. + // let base_flux = 1.0 / ((WORLD_SIZE.x * WORLD_SIZE.y) as f32); + let base_flux = 1.0; + let mut flux = vec![base_flux; WORLD_SIZE.x * WORLD_SIZE.y].into_boxed_slice(); + for &chunk_idx in newh.into_iter().rev() { + let chunk_idx = chunk_idx as usize; + let downhill_idx = downhill[chunk_idx]; + if downhill_idx >= 0 { + flux[downhill_idx as usize] += flux[chunk_idx]; + } + } + flux +} + +/// Kind of water on this tile. +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum RiverKind { + Ocean, + Lake { + /// In addition to a downhill node (pointing to, eventually, the bottom of the lake), each + /// lake also has a "pass" that identifies the direction out of which water should flow + /// from this lake if it is minimally flooded. 
While some lakes may be too full for this + /// to be the actual pass used by their enclosing lake, we still use this as a way to make + /// sure that lake connections to rivers flow in the correct direction. + neighbor_pass_pos: Vec2, + }, + /// River should be maximal. + River { + /// Dimensions of the river's cross-sectional area, as m × m (rivers are approximated + /// as an open rectangular prism in the direction of the velocity vector). + cross_section: Vec2, + }, +} + +impl RiverKind { + pub fn is_river(&self) -> bool { + if let RiverKind::River { .. } = *self { + true + } else { + false + } + } + + pub fn is_lake(&self) -> bool { + if let RiverKind::Lake { .. } = *self { + true + } else { + false + } + } +} + +impl PartialOrd for RiverKind { + fn partial_cmp(&self, other: &Self) -> Option { + match (*self, *other) { + (RiverKind::Ocean, RiverKind::Ocean) => Some(Ordering::Equal), + (RiverKind::Ocean, _) => Some(Ordering::Less), + (_, RiverKind::Ocean) => Some(Ordering::Greater), + (RiverKind::Lake { .. }, RiverKind::Lake { .. }) => None, + (RiverKind::Lake { .. }, _) => Some(Ordering::Less), + (_, RiverKind::Lake { .. }) => Some(Ordering::Greater), + (RiverKind::River { .. }, RiverKind::River { .. }) => None, + } + } +} + +/// From velocity and cross_section we can calculate the volumetric flow rate, as the +/// cross-sectional area times the velocity. +/// +/// TODO: we might choose to include a curve for the river, as long as it didn't allow it to +/// cross more than one neighboring chunk away. For now we defer this to rendering time. +/// +/// NOTE: This structure is 57 (or more likely 64) bytes, which is kind of big. +#[derive(Clone, Debug, Default)] +pub struct RiverData { + /// A velocity vector (in m / minute, i.e. voxels / second from a game perspective). + /// + /// TODO: To represent this in a better-packed way, use u8s instead (as "f8s"). + pub(crate) velocity: Vec3, + /// The computed derivative for the segment of river starting at this chunk (and flowing + /// downhill). Should be 0 at endpoints. For rivers with more than one incoming segment, we + /// weight the derivatives by flux (cross-sectional area times velocity) which is correlated + /// with mass / second; treating the derivative as "velocity" with respect to length along the + /// river, we treat the weighted sum of incoming splines as the "momentum", and can divide it + /// by the total incoming mass as a way to find the velocity of the center of mass. We can + /// then use this derivative to find a "tangent" for the incoming river segment at this point, + /// and as the linear part of the interpolating spline at this point. + /// + /// Note that we aren't going to have completely smooth curves here anyway, so we will probably + /// end up applying a dampening factor as well (maybe based on the length?) to prevent + /// extremely wild oscillations. + pub(crate) spline_derivative: Vec2, + /// If this chunk is part of a river, this should be true. We can't just compute this from the + /// cross section because once a river becomes visible, we want it to stay visible until it + /// reaches its sink. + pub river_kind: Option, + /// We also have a second record for recording any rivers in nearby chunks that manage to + /// intersect this chunk, though this is unlikely to happen in current gameplay. This is + /// because river areas are allowed to cross arbitrarily many chunk boundaries, if they are + /// wide enough. In such cases we may choose to render the rivers as particularly deep in + /// those places. 
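// The centre-of-mass weighting described for spline_derivative above, in isolation: each
// incoming segment contributes derivative * flux ("momentum"), and dividing the sum by the
// total incoming flux recovers a single representative derivative. A minimal sketch over
// plain (dx, dy) derivative pairs, each paired with its flux:
fn flux_weighted_derivative(incoming: &[((f64, f64), f64)]) -> (f64, f64) {
    let total_flux: f64 = incoming.iter().map(|&(_, flux)| flux).sum();
    if total_flux <= 0.0 {
        return (0.0, 0.0);
    }
    let (mx, my) = incoming
        .iter()
        .fold((0.0, 0.0), |(ax, ay), &((dx, dy), flux)| (ax + dx * flux, ay + dy * flux));
    (mx / total_flux, my / total_flux)
}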
+ pub(crate) neighbor_rivers: Vec, +} + +impl RiverData { + pub fn is_river(&self) -> bool { + self.river_kind + .as_ref() + .map(RiverKind::is_river) + .unwrap_or(false) + } + + pub fn is_lake(&self) -> bool { + self.river_kind + .as_ref() + .map(RiverKind::is_lake) + .unwrap_or(false) + } +} + +/// Draw rivers and assign them heights, widths, and velocities. Take some liberties with the +/// constant factors etc. in order to make it more likely that we draw rivers at all. +pub fn get_rivers( + newh: &[u32], + water_alt: &[f32], + downhill: &[isize], + indirection: &[i32], + drainage: &[f32], +) -> Box<[RiverData]> { + // For continuity-preserving quadratic spline interpolation, we (appear to) need to build + // up the derivatives from the top down. Fortunately this computation seems tractable. + + let mut rivers = vec![RiverData::default(); WORLD_SIZE.x * WORLD_SIZE.y].into_boxed_slice(); + let neighbor_coef = TerrainChunkSize::RECT_SIZE.map(|e| e as f64); + // NOTE: This technically makes us discontinuous, so we should be cautious about using this. + let derivative_divisor = 1.0; + for &chunk_idx in newh.into_iter().rev() { + let chunk_idx = chunk_idx as usize; + let downhill_idx = downhill[chunk_idx]; + if downhill_idx < 0 { + // We are in the ocean. + debug_assert!(downhill_idx == -2); + rivers[chunk_idx].river_kind = Some(RiverKind::Ocean); + continue; + } + let downhill_idx = downhill_idx as usize; + let downhill_pos = uniform_idx_as_vec2(downhill_idx); + let dxy = (downhill_pos - uniform_idx_as_vec2(chunk_idx)).map(|e| e as f64); + let neighbor_dim = neighbor_coef * dxy; + // First, we calculate the river's volumetric flow rate. + let chunk_drainage = drainage[chunk_idx] as f64; + // Volumetric flow rate is just the total drainage area to this chunk, times rainfall + // height per chunk per minute (needed in order to use this as a m³ volume). + // TODO: consider having different rainfall rates (and including this information in the + // computation of drainage). + let volumetric_flow_rate = chunk_drainage * CONFIG.rainfall_chunk_rate as f64; + let downhill_drainage = drainage[downhill_idx] as f64; + + // We know the drainage to the downhill node is just chunk_drainage - 1.0 (the amount of + // rainfall this chunk is said to get), so we don't need to explicitly remember the + // incoming mass. How do we find a slope for endpoints where there is no incoming data? + // Currently, we just assume it's set to 0.0. + // TODO: Fix this when we add differing amounts of rainfall. + let incoming_drainage = downhill_drainage - 1.0; + let get_river_spline_derivative = + |neighbor_dim: Vec2, spline_derivative: Vec2| { + /*if incoming_drainage == 0.0 { + Vec2::zero() + } else */ + { + // "Velocity of center of mass" of splines of incoming flows. + let river_prev_slope = spline_derivative.map(|e| e as f64)/* / incoming_drainage*/; + // NOTE: We need to make sure the slope doesn't get *too* crazy. + // ((dpx - cx) - 4 * MAX).abs() = bx + // NOTE: This will fail if the distance between chunks in any direction + // is exactly TerrainChunkSize::RECT * 4.0, but hopefully this should not be possible. + // NOTE: This isn't measuring actual distance, you can go farther on diagonals. + // let max_deriv = neighbor_dim - neighbor_coef * 4.0; + let max_deriv = neighbor_dim - neighbor_coef * 2.0 * 2.0.sqrt(); + let extra_divisor = river_prev_slope + .map2(max_deriv, |e, f| (e / f).abs()) + .reduce_partial_max(); + // Set up the river's spline derivative. 
For each incoming river at pos with + // river_spline_derivative bx, we can compute our interpolated slope as: + // d_x = 2 * (chunk_pos - pos - bx) + bx + // = 2 * (chunk_pos - pos) - bx + // + // which is exactly twice what was weighted by uphill nodes to get our + // river_spline_derivative in the first place. + // + // NOTE: this probably implies that the distance shouldn't be normalized, since the + // distances aren't actually equal between x and y... we'll see what happens. + (if extra_divisor > 1.0 { + river_prev_slope / extra_divisor + } else { + river_prev_slope + }) + .map(|e| e as f32) + } + }; + + let river = &rivers[chunk_idx]; + let river_spline_derivative = + get_river_spline_derivative(neighbor_dim, river.spline_derivative); + + let indirection_idx = indirection[chunk_idx]; + // Find the lake we are flowing into. + let lake_idx = if indirection_idx < 0 { + // If we're a lake bottom, our own indirection is negative. + let mut lake = &mut rivers[chunk_idx]; + let neighbor_pass_idx = downhill_idx; + // Mass flow from this lake is treated as a weighting factor (this is currently + // considered proportional to drainage, but in the direction of "lake side of pass to + // pass."). + let neighbor_pass_pos = downhill_pos; + lake.river_kind = Some(RiverKind::Lake { + neighbor_pass_pos: neighbor_pass_pos + * TerrainChunkSize::RECT_SIZE.map(|e| e as i32), + }); + lake.spline_derivative = Vec2::zero()/*river_spline_derivative*/; + let pass_idx = (-indirection_idx) as usize; + let pass_pos = uniform_idx_as_vec2(pass_idx); + let lake_direction = neighbor_coef * (neighbor_pass_pos - pass_pos).map(|e| e as f64); + // Our side of the pass must have already been traversed (even if our side of the pass + // is the lake bottom), so we acquire its computed river_spline_derivative. + let pass = &rivers[pass_idx]; + debug_assert!(pass.is_lake()); + let pass_spline_derivative = pass.spline_derivative.map(|e| e as f64)/*Vec2::zero()*/; + // Normally we want to not normalize, but for the lake we don't want to generate a + // super long edge since it could lead to a lot of oscillation... this is another + // reason why we shouldn't use the lake bottom. + // lake_direction.normalize(); + // We want to assign the drained node from any lake to be a river. + let lake_drainage = /*drainage[chunk_idx]*/chunk_drainage; + let lake_neighbor_pass_incoming_drainage = incoming_drainage; + let weighted_flow = (lake_direction * 2.0 - pass_spline_derivative) + / derivative_divisor + * lake_drainage + / lake_neighbor_pass_incoming_drainage; + let mut lake_neighbor_pass = &mut rivers[neighbor_pass_idx]; + // We definitely shouldn't have encountered this yet! + debug_assert!(lake_neighbor_pass.velocity == Vec3::zero()); + // TODO: Rethink making the lake neighbor pass always a river or lake, no matter how + // much incoming water there is? Sometimes it looks weird having a river emerge from a + // tiny pool. + lake_neighbor_pass.river_kind = Some(RiverKind::River { + cross_section: Vec2::default(), + }); + // We also want to add to the out-flow side of the pass a (flux-weighted) + // derivative coming from the lake center. + // + // NOTE: Maybe consider utilizing 3D component of spline somehow? Currently this is + // basically a flat vector, but that might be okay from lake to other side of pass. 
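// The identity quoted in the comment above, spelled out: for a quadratic segment
// P(t) = P0 + b * t + c * t^2 with P(1) = P1, we get c = (P1 - P0) - b, so the outgoing
// derivative is P'(1) = b + 2 * c = 2 * (P1 - P0) - b. A one-dimensional sketch of that rule:
fn quadratic_end_derivative(p0: f64, p1: f64, b: f64) -> f64 {
    2.0 * (p1 - p0) - b
}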
+ lake_neighbor_pass.spline_derivative += /*Vec2::new(weighted_flow.x, weighted_flow.y)*/ + weighted_flow.map(|e| e as f32); + continue; + } else { + indirection_idx as usize + }; + // Add our spline derivative to the downhill river (weighted by the chunk's drainage). + // + // TODO: consider utilizing height difference component of flux as well; currently we + // just discard it in figuring out the spline's slope. + let downhill_river = &mut rivers[downhill_idx]; + let weighted_flow = (neighbor_dim * 2.0 - river_spline_derivative.map(|e| e as f64)) + / derivative_divisor + * chunk_drainage + / incoming_drainage; + downhill_river.spline_derivative += weighted_flow.map(|e| e as f32); + + // Find the pass this lake is flowing into (i.e. water at the lake bottom gets + // pushed towards the point identified by pass_idx). + let neighbor_pass_idx = downhill[lake_idx]; + // Find our own water height. + let chunk_water_alt = water_alt[chunk_idx]; + if neighbor_pass_idx >= 0 { + // We may be a river. But we're not sure yet, since we still could be + // underwater. Check the lake height and see if our own water height is within ε of + // it. + let pass_idx = (-indirection[lake_idx]) as usize; + let lake_water_alt = water_alt[lake_idx]; + if chunk_water_alt == lake_water_alt { + // Not a river. + // Check whether we we are the lake side of the pass. + // NOTE: Safe because this is a lake. + let (neighbor_pass_pos, river_spline_derivative) = if pass_idx == chunk_idx + /*true*/ + { + // This is a pass, so set our flow direction to point to the neighbor pass + // rather than downhill. + // NOTE: Safe because neighbor_pass_idx >= 0. + ( + uniform_idx_as_vec2(neighbor_pass_idx as usize), + river_spline_derivative, + ) + } else { + // Try pointing towards the lake side of the pass. + (uniform_idx_as_vec2(pass_idx), river_spline_derivative) + }; + let mut lake = &mut rivers[chunk_idx]; + lake.spline_derivative = river_spline_derivative; + lake.river_kind = Some(RiverKind::Lake { + neighbor_pass_pos: neighbor_pass_pos + * TerrainChunkSize::RECT_SIZE.map(|e| e as i32), + }); + continue; + } + // Otherwise, we must be a river. + } else { + // We are flowing into the ocean. + debug_assert!(neighbor_pass_idx == -2); + // But we are not the ocean, so we must be a river. + } + // Now, we know we are a river *candidate*. We still don't know whether we are actually a + // river, though. There are two ways for that to happen: + // (i) We are already a river. + // (ii) Using the Gauckler–Manning–Strickler formula for cross-sectional average velocity + // of water, we establish that the river can be "big enough" to appear on the Veloren + // map. + // + // This is very imprecise, of course, and (ii) may (and almost certainly will) change over + // time. + // + // In both cases, we preemptively set our child to be a river, to make sure we have an + // unbroken stream. Also in both cases, we go to the effort of computing an effective + // water velocity vector and cross-sectional dimensions, as well as figuring out the + // derivative of our interpolating spline (since this percolates through the whole river + // network). + let downhill_water_alt = water_alt[downhill_idx]; + let neighbor_distance = neighbor_dim.magnitude(); + let dz = (downhill_water_alt - chunk_water_alt) * CONFIG.mountain_scale; + let slope = dz.abs() as f64 / neighbor_distance; + if slope == 0.0 { + // This is not a river--how did this even happen? 
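// The velocity model named above, Gauckler–Manning–Strickler, with the hydraulic radius
// approximated by the flow depth for wide channels (the same approximation made further
// down): v = (1 / n) * R_h^(2/3) * sqrt(S), where n is the roughness coefficient and S the
// slope. A minimal sketch:
fn manning_velocity(roughness_n: f64, depth_m: f64, slope: f64) -> f64 {
    (1.0 / roughness_n) * depth_m.powf(2.0 / 3.0) * slope.sqrt()
}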
+ let pass_idx = (-indirection_idx) as usize; + log::error!("Our chunk (and downhill, lake, pass, neighbor_pass): {:?} (to {:?}, in {:?} via {:?} to {:?}), chunk water alt: {:?}, lake water alt: {:?}", + uniform_idx_as_vec2(chunk_idx), + uniform_idx_as_vec2(downhill_idx), + uniform_idx_as_vec2(lake_idx), + uniform_idx_as_vec2(pass_idx), + if neighbor_pass_idx >= 0 { Some(uniform_idx_as_vec2(neighbor_pass_idx as usize)) } else { None }, + water_alt[chunk_idx], + water_alt[lake_idx]); + panic!("Should this happen at all?"); + } + let slope_sqrt = slope.sqrt(); + // Now, we compute a quantity that is proportional to the velocity of the chunk, derived + // from the Manning formula, equal to + // volumetric_flow_rate / slope_sqrt * CONFIG.river_roughness. + let almost_velocity = volumetric_flow_rate / slope_sqrt * CONFIG.river_roughness as f64; + // From this, we can figure out the width of the chunk if we know the height. For now, we + // hardcode the height to 0.5, but it should almost certainly be much more complicated than + // this. + // let mut height = 0.5f32; + // We approximate the river as a rectangular prism. Theoretically, we need to solve the + // following quintic equation to determine its width from its height: + // + // h^5 * w^5 = almost_velocity^3 * (w + 2 * h)^2. + // + // This is because one of the quantities in the Manning formula (the unknown) is R_h = + // (area of cross-section / h)^(2/3). + // + // Unfortunately, quintic equations do not in general have algebraic solutions, and it's + // not clear (to me anyway) that this one does in all cases. + // + // In practice, for high ratios of width to height, we can approximate the rectangular + // prism's perimeter as equal to its width, so R_h as equal to height. This greatly + // simplifies the calculation. For simplicity, we do this even for low ratios of width to + // height--I found that for most real rivers, at least big ones, this approximation is + // "good enough." We don't need to be *that* realistic :P + // + // NOTE: Derived from a paper on estimating river width. + let mut width = 5.0 + * (CONFIG.river_width_to_depth as f64 + * (CONFIG.river_width_to_depth as f64 + 2.0).powf(2.0 / 3.0)) + .powf(3.0 / 8.0) + * volumetric_flow_rate.powf(3.0 / 8.0) + * slope.powf(-3.0 / 16.0) + * (CONFIG.river_roughness as f64).powf(3.0 / 8.0); + width = width.max(0.0); + + let mut height = if width == 0.0 { + CONFIG.river_min_height as f64 + } else { + (almost_velocity / width).powf(3.0 / 5.0) + }; + + // We can now weight the river's drainage by its direction, which we use to help improve + // the slope of the downhill node. + let river_direction = Vec3::new( + neighbor_dim.x, + neighbor_dim.y, + (dz as f64).signum() * (dz as f64), + ); + + // Now, we can check whether this is "really" a river. + // Currently, we just check that width and height are at least 0.5 and + // CONFIG.river_min_height. + let river = &rivers[chunk_idx]; + let is_river = river.is_river() || width >= 0.5 && height >= CONFIG.river_min_height as f64; + let mut downhill_river = &mut rivers[downhill_idx]; + + if is_river { + // Provisionally make the downhill chunk a river as well. 
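// Rearranging Manning's formula under the wide-channel approximation R_h ≈ h:
// Q = (1 / n) * h^(2/3) * sqrt(S) * (w * h), so w * h^(5/3) = Q * n / sqrt(S), which is the
// `almost_velocity` quantity above; hence h = (almost_velocity / w)^(3/5). The max-width
// clamp below re-derives the depth the same way after capping w. A minimal sketch of that
// back-solve:
fn depth_for_width(volumetric_flow: f64, slope: f64, roughness_n: f64, width: f64) -> f64 {
    let almost_velocity = volumetric_flow * roughness_n / slope.sqrt();
    (almost_velocity / width).powf(3.0 / 5.0)
}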
+ downhill_river.river_kind = Some(RiverKind::River { + cross_section: Vec2::default(), + }); + + // Additionally, if the cross-sectional area for this river exceeds the max river + // width, the river is overflowing the two chunks adjacent to it, which we'd prefer to + // avoid since only its two immediate neighbors (orthogonal to the downhill direction) + // are guaranteed uphill of it. + // Solving this properly most likely requires modifying the erosion model to + // take channel width into account, which is a formidable task that likely requires + // rethinking the current grid-based erosion model (or at least, requires tracking some + // edges that aren't implied by the grid graph). For now, we will solve this problem + // by making the river deeper when it hits the max width, until it consumes all the + // available energy in this part of the river. + let max_width = TerrainChunkSize::RECT_SIZE.x as f64 * CONFIG.river_max_width as f64; + if width > max_width { + width = max_width; + height = (almost_velocity / width).powf(3.0 / 5.0); + } + } + // Now we can compute the river's approximate velocity magnitude as well, as + let velocity_magnitude = + 1.0 / CONFIG.river_roughness as f64 * height.powf(2.0 / 3.0) * slope_sqrt; + + // Set up the river's cross-sectional area. + let cross_section = Vec2::new(width as f32, height as f32); + // Set up the river's velocity vector. + let mut velocity = river_direction; + velocity.normalize(); + velocity *= velocity_magnitude; + + let mut river = &mut rivers[chunk_idx]; + // NOTE: Not trying to do this more cleverly because we want to keep the river's neighbors. + // TODO: Actually put something in the neighbors. + river.velocity = velocity.map(|e| e as f32); + river.spline_derivative = river_spline_derivative; + river.river_kind = if is_river { + Some(RiverKind::River { cross_section }) + } else { + None + }; + } + rivers +} + +/// Precompute the maximum slope at all points. +/// +/// TODO: See if allocating in advance is worthwhile. +fn get_max_slope(h: &[f32], rock_strength_nz: &(impl NoiseFn> + Sync)) -> Box<[f32]> { + const MIN_MAX_ANGLE: f64 = 6.0 / 360.0 * 2.0 * f64::consts::PI; + const MAX_MAX_ANGLE: f64 = 54.0 / 360.0 * 2.0 * f64::consts::PI; + const MAX_ANGLE_RANGE: f64 = MAX_MAX_ANGLE - MIN_MAX_ANGLE; + h.par_iter() + .enumerate() + .map(|(posi, &z)| { + let wposf = (uniform_idx_as_vec2(posi)).map(|e| e as f64); + let wposz = z as f64 * CONFIG.mountain_scale as f64; + // Normalized to be between 6 and and 54 degrees. + let div_factor = 32.0; + let rock_strength = rock_strength_nz + .get([wposf.x / div_factor, wposf.y / div_factor, wposz]) + .max(-1.0) + .min(1.0) + * 0.5 + + 0.5; + // Powering rock_strength^((1.25 - z)^6) means the maximum angle increases with z, but + // not too fast. At z = 0.25 the angle is not affected at all, below it the angle is + // lower, and above it the angle is higher. + // + // Logistic regression. Make sure x ∈ (0, 1). + let logit = |x: f64| x.ln() - (-x).ln_1p(); + // 0.5 + 0.5 * tanh(ln(1 / (1 - 0.1) - 1) / (2 * (sqrt(3)/pi))) + let logistic_2_base = 3.0f64.sqrt() * f64::consts::FRAC_2_PI; + // Assumes μ = 0, σ = 1 + let logistic_cdf = |x: f64| (x / logistic_2_base).tanh() * 0.5 + 0.5; + + // We do log-odds against center, so that our log odds are 0 when x = 0.25, lower when x is + // lower, and higher when x is higher. + // + // TODO: Make all this stuff configurable... but honestly, it's so complicated that I'm not + // sure anyone would be able to usefully tweak them on a per-map basis? 
Plus it's just a + // hacky heuristic anyway. + let center = 0.25; + let delta = 0.05; + let dmin = center - delta; + let dmax = center + delta; + let log_odds = |x: f64| logit(x) - logit(center); + let rock_strength = logistic_cdf( + 1.0 * logit(rock_strength.min(1.0f64 - 1e-7).max(1e-7)) + + 1.0 * log_odds((z as f64).min(dmax).max(dmin)), + ); + let max_slope = (rock_strength * MAX_ANGLE_RANGE + MIN_MAX_ANGLE).tan(); + max_slope as f32 + }) + .collect::>() + .into_boxed_slice() +} + +/// Erode all chunks by amount. +/// +/// Our equation is: +/// +/// dh(p) / dt = uplift(p)−k * A(p)^m * slope(p)^n +/// +/// where A(p) is the drainage area at p, m and n are constants +/// (we choose m = 0.5 and n = 1), and k is a constant. We choose +/// +/// k = 2.244 * uplift.max() / (desired_max_height) +/// +/// since this tends to produce mountains of max height desired_max_height; and we set +/// desired_max_height = 1.0 to reflect limitations of mountain scale. +/// +/// This algorithm does this in four steps: +/// +/// 1. Sort the nodes in h by height (so the lowest node by altitude is first in the +/// list, and the highest node by altitude is last). +/// 2. Iterate through the list in *reverse.* For each node, we compute its drainage area as +/// the sum of the drainage areas of its "children" nodes (i.e. the nodes with directed edges to +/// this node). To do this efficiently, we start with the "leaves" (the highest nodes), which +/// have no neighbors higher than them, hence no directed edges to them. We add their area to +/// themselves, and then to all neighbors that they flow into (their "ancestors" in the flow +/// graph); currently, this just means the node immediately downhill of this node. +/// As we go lower, we know that all our "children" already had their areas computed, which +/// means that we can repeat the process in order to derive all the final areas. +/// 3. Now, iterate through the list in *order.* Whether we used the filling method to compute a +/// "filled" version of each depression, or used the lake connection algoirthm described in [1], +/// each node is guaranteed to have zero or one drainage edges out, representing the direction +/// of water flow for that node. For nodes i with zero drainage edges out (boundary nodes and +/// lake bottoms) we set the slope to 0 (so the change in altitude is uplift(i)) +/// For nodes with at least one drainage edge out, we take advantage of the fact that we are +/// computing new heights in order and rewrite our equation as (letting j = downhill[i], A[i] +/// be the computed area of point i, p(i) be the x-y position of point i, +/// flux(i) = k * A[i]^m / ((p(i) - p(j)).magnitude()), and δt = 1): +/// +/// h[i](t + dt) = h[i](t) + δt * (uplift[i] + flux(i) * h[j](t + δt)) / (1 + flux(i) * δt). +/// +/// Since we compute heights in ascending order by height, and j is downhill from i, h[j] will +/// always be the *new* h[j](t + δt), while h[i] will still not have been computed yet, so we +/// only need to visit each node once. +/// +/// [1] Guillaume Cordonnier, Jean Braun, Marie-Paule Cani, Bedrich Benes, Eric Galin, et al.. +/// Large Scale Terrain Generation from Tectonic Uplift and Fluvial Erosion. +/// Computer Graphics Forum, Wiley, 2016, Proc. EUROGRAPHICS 2016, 35 (2), pp.165-175. +/// ⟨10.1111/cgf.12820⟩. 
⟨hal-01262376⟩ +/// +fn erode( + h: &mut [f32], + erosion_base: f32, + max_uplift: f32, + _seed: &RandomField, + rock_strength_nz: &(impl NoiseFn> + Sync), + uplift: impl Fn(usize) -> f32, + is_ocean: impl Fn(usize) -> bool + Sync, +) { + log::info!("Done draining..."); + let mmaxh = 1.0; + let k = erosion_base as f64 + 2.244 / mmaxh as f64 * max_uplift as f64; + let ((dh, indirection, newh, area), max_slope) = rayon::join( + || { + let mut dh = downhill(h, |posi| is_ocean(posi)); + log::info!("Computed downhill..."); + let (boundary_len, indirection, newh) = get_lakes(&h, &mut dh); + log::info!("Got lakes..."); + let area = get_drainage(&newh, &dh, boundary_len); + log::info!("Got flux..."); + (dh, indirection, newh, area) + }, + || { + let max_slope = get_max_slope(h, rock_strength_nz); + log::info!("Got max slopes..."); + max_slope + }, + ); + + assert!(h.len() == dh.len() && dh.len() == area.len()); + // max angle of slope depends on rock strength, which is computed from noise function. + let neighbor_coef = TerrainChunkSize::RECT_SIZE.map(|e| e as f64); + let chunk_area = neighbor_coef.x * neighbor_coef.y; + // Iterate in ascending height order. + let mut maxh = 0.0; + let mut nland = 0usize; + let mut sums = 0.0; + let mut sumh = 0.0; + for &posi in &*newh { + let posi = posi as usize; + let posj = dh[posi]; + if posj < 0 { + if posj == -1 { + panic!("Disconnected lake!"); + } + // Egress with no outgoing flows. + } else { + nland += 1; + let posj = posj as usize; + let dxy = (uniform_idx_as_vec2(posi) - uniform_idx_as_vec2(posj)).map(|e| e as f64); + + // Has an outgoing flow edge (posi, posj). + // flux(i) = k * A[i]^m / ((p(i) - p(j)).magnitude()), and δt = 1 + let neighbor_distance = (neighbor_coef * dxy).magnitude(); + // Since the area is in meters^(2m) and neighbor_distance is in m, so long as m = 0.5, + // we have meters^(1) / meters^(1), so they should cancel out. Otherwise, we would + // want to divide neighbor_distance by CONFIG.mountain_scale and area[posi] by + // CONFIG.mountain_scale^2, to make sure we were working in the correct units for dz + // (which has 1.0 height unit = CONFIG.mountain_scale meters). + let flux = k * (chunk_area * area[posi] as f64).sqrt() / neighbor_distance; + // h[i](t + dt) = (h[i](t) + δt * (uplift[i] + flux(i) * h[j](t + δt))) / (1 + flux(i) * δt). + // NOTE: posj has already been computed since it's downhill from us. + let h_j = h[posj] as f64; + let old_h_i = h[posi] as f64; + let mut new_h_i = (old_h_i + (uplift(posi) as f64 + flux * h_j)) / (1.0 + flux); + // Find out if this is a lake bottom. + let indirection_idx = indirection[posi]; + let is_lake_bottom = indirection_idx < 0; + // Test the slope. + let max_slope = max_slope[posi] as f64; + let dz = (new_h_i - h_j) * CONFIG.mountain_scale as f64; + let mag_slope = dz.abs() / neighbor_distance; + let _fake_neighbor = is_lake_bottom && dxy.x.abs() > 1.0 && dxy.y.abs() > 1.0; + // If you're on the lake bottom and not right next to your neighbor, don't compute a + // slope. + if + /* !is_lake_bottom */ /* !fake_neighbor */ + true { + if + /* !is_lake_bottom && */ + mag_slope > max_slope { + // println!("old slope: {:?}, new slope: {:?}, dz: {:?}, h_j: {:?}, new_h_i: {:?}", mag_slope, max_slope, dz, h_j, new_h_i); + // Thermal erosion says this can't happen, so we reduce dh_i to make the slope + // exactly max_slope. + // max_slope = (old_h_i + dh - h_j) * CONFIG.mountain_scale / NEIGHBOR_DISTANCE + // dh = max_slope * NEIGHBOR_DISTANCE / CONFIG.mountain_scale + h_j - old_h_i. 
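// The two update rules applied just below, isolated as minimal sketches. First the implicit
// stream-power step from the doc comment above: with F = k * sqrt(chunk_area * A_i) / |p_i - p_j|
// and dt = 1, solving h_i' = h_i + uplift_i + F * (h_j' - h_i') for h_i' gives
// h_i' = (h_i + uplift_i + F * h_j') / (1 + F).
fn implicit_stream_power_step(h_i: f64, h_j_new: f64, uplift_i: f64, flux: f64) -> f64 {
    (h_i + uplift_i + flux * h_j_new) / (1.0 + flux)
}

// Second, the thermal-erosion cap: if the slope implied towards the already-updated downhill
// neighbour exceeds max_slope, pull the new height back onto the critical slope. Altitudes
// here are in units of mountain_scale, distances in metres, matching the surrounding code.
fn cap_to_max_slope(new_h_i: f64, h_j: f64, max_slope: f64, dist_m: f64, mountain_scale: f64) -> f64 {
    let dz = (new_h_i - h_j) * mountain_scale;
    if dz.abs() / dist_m > max_slope {
        dz.signum() * max_slope * dist_m / mountain_scale + h_j
    } else {
        new_h_i
    }
}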
+ let slope = dz.signum() * max_slope; + sums += max_slope; + new_h_i = slope * neighbor_distance / CONFIG.mountain_scale as f64 + h_j; + } else { + sums += mag_slope; + // Just use the computed rate. + } + h[posi] = new_h_i as f32; + sumh += new_h_i; + } + } + maxh = h[posi].max(maxh); + } + log::info!( + "Done eroding (max height: {:?}) (avg height: {:?}) (avg slope: {:?})", + maxh, + if nland == 0 { + f64::INFINITY + } else { + sumh / nland as f64 + }, + if nland == 0 { + f64::INFINITY + } else { + sums / nland as f64 + }, + ); +} + +/// The Planchon-Darboux algorithm for extracting drainage networks. +/// +/// http://horizon.documentation.ird.fr/exl-doc/pleins_textes/pleins_textes_7/sous_copyright/010031925.pdf +/// +/// See https://github.com/mewo2/terrain/blob/master/terrain.js +pub fn fill_sinks( + h: impl Fn(usize) -> f32 + Sync, + is_ocean: impl Fn(usize) -> bool + Sync, +) -> Box<[f32]> { + // NOTE: We are using the "exact" version of depression-filling, which is slower but doesn't + // change altitudes. + let epsilon = /*1.0 / (1 << 7) as f32 / CONFIG.mountain_scale*/0.0; + let infinity = f32::INFINITY; + let range = 0..WORLD_SIZE.x * WORLD_SIZE.y; + let oldh = range + .into_par_iter() + .map(|posi| h(posi)) + .collect::>() + .into_boxed_slice(); + let mut newh = oldh + .par_iter() + .enumerate() + .map(|(posi, &h)| { + let is_near_edge = is_ocean(posi); + if is_near_edge { + h + } else { + infinity + } + }) + .collect::>() + .into_boxed_slice(); + + loop { + let mut changed = false; + for posi in 0..newh.len() { + let nh = newh[posi]; + let oh = oldh[posi]; + if nh == oh { + continue; + } + for nposi in neighbors(posi) { + let onbh = newh[nposi]; + let nbh = onbh + epsilon; + // If there is even one path downhill from this node's original height, fix + // the node's new height to be equal to its original height, and break out of the + // loop. + if oh >= nbh { + newh[posi] = oh; + changed = true; + break; + } + // Otherwise, we know this node's original height is below the new height of all of + // its neighbors. Then, we try to choose the minimum new height among all this + // node's neighbors that is (plus a constant epislon) below this node's new height. + // + // (If there is no such node, then the node's new height must be (minus a constant + // epsilon) lower than the new height of every neighbor, but above its original + // height. But this can't be true for *all* nodes, because if this is true for any + // node, it is not true of any of its neighbors. So all neighbors must either be + // their original heights, or we will have another iteration of the loop (one of + // its neighbors was changed to its minimum neighbor). In the second case, in the + // next round, all neighbor heights will be at most nh + epsilon). + if nh > nbh && nbh > oh { + newh[posi] = nbh; + changed = true; + } + } + } + if !changed { + return newh; + } + } +} + +/// Computes which tiles are ocean tiles by + +/// Algorithm for finding and connecting lakes. Assumes newh and downhill have already +/// been computed. When a lake's value is negative, it is its own lake root, and when it is 0, it +/// is on the boundary of Ω. +/// +/// Returns a 4-tuple containing: +/// - The first indirection vector (associating chunk indices with their lake's root node). +/// - A list of chunks on the boundary (non-lake egress points). +/// - The second indirection vector (associating chunk indices with their lake's adjacency list). 
+/// - The adjacency list (stored in a single vector), indexed by the second indirection vector. +pub fn get_lakes(h: &[f32], downhill: &mut [isize]) -> (usize, Box<[i32]>, Box<[u32]>) { + // Associates each lake index with its root node (the deepest one in the lake), and a list of + // adjacent lakes. The list of adjacent lakes includes the lake index of the adjacent lake, + // and a node index in the adjacent lake which has a neighbor in this lake. The particular + // neighbor should be the one that generates the minimum "pass height" encountered so far, + // i.e. the chosen pair should minimize the maximum of the heights of the nodes in the pair. + + // We start by taking steps to allocate an indirection vector to use for storing lake indices. + // Initially, each entry in this vector will contain 0. We iterate in ascending order through + // the sorted newh. If the node has downhill == -2, it is a boundary node Ω and we store it in + // the boundary vector. If the node has downhill == -1, it is a fresh lake, and we store 0 in + // it. If the node has non-negative downhill, we use the downhill index to find the next node + // down; if the downhill node has a lake entry < 0, then downhill is a lake and its entry + // can be negated to find an (over)estimate of the number of entries it needs. If the downhill + // node has a non-negative entry, then its entry is the lake index for this node, so we should + // access that entry and increment it, then set our own entry to it. + let mut boundary = Vec::with_capacity(downhill.len()); + let mut indirection = vec![/*-1i32*/0i32; WORLD_SIZE.x * WORLD_SIZE.y].into_boxed_slice(); + + let mut newh = Vec::with_capacity(downhill.len()); + + // Now, we know that the sum of all the indirection nodes will be the same as the number of + // nodes. We can allocate a *single* vector with 8 * nodes entries, to be used as storage + // space, and augment our indirection vector with the starting index, resulting in a vector of + // slices. As we go, we replace each lake entry with its index in the new indirection buffer, + // allowing + let mut lakes = vec![(-1, 0); /*(indirection.len() - boundary.len())*/indirection.len() * 8]; + let mut indirection_ = vec![0u32; indirection.len()]; + // First, find all the lakes. + let mut lake_roots = Vec::with_capacity(downhill.len()); // Test + for (chunk_idx, &dh) in (&*downhill) + .into_iter() + .enumerate() + .filter(|(_, &dh_idx)| dh_idx < 0) + { + if dh == -2 { + // On the boundary, add to the boundary vector. + boundary.push(chunk_idx); + // Still considered a lake root, though. + } else if dh == -1 { + lake_roots.push(chunk_idx); + } else { + panic!("Impossible."); + } + // Find all the nodes uphill from this lake. Since there is only one outgoing edge + // in the "downhill" graph, this is guaranteed never to visit a node more than + // once. + let start = newh.len(); + let indirection_idx = (start * 8) as u32; + // New lake root + newh.push(chunk_idx as u32); + let mut cur = start; + while cur < newh.len() { + let node = newh[cur as usize]; + for child in uphill(downhill, node as usize) { + // lake_idx is the index of our lake root. + indirection[child] = chunk_idx as i32; + indirection_[child] = indirection_idx; + newh.push(child as u32); + } + cur += 1; + } + // Find the number of elements pushed. + let length = (cur - start) * 8; + // New lake root (lakes have indirection set to -length). 
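// The indirection encoding established here, as it is decoded throughout the rest of this
// function: a non-negative entry names the chunk's lake root, while a negative entry marks
// the chunk as its own lake root (at this point its magnitude records the space reserved for
// that lake's pass list). A minimal sketch of the decode step used repeatedly below:
fn lake_root(indirection: &[i32], chunk_idx: usize) -> usize {
    let entry = indirection[chunk_idx];
    if entry >= 0 {
        entry as usize
    } else {
        chunk_idx
    }
}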
+ indirection[chunk_idx] = -(length as i32); + indirection_[chunk_idx] = indirection_idx; + } + assert_eq!(newh.len(), downhill.len()); + + log::info!("Old lake roots: {:?}", lake_roots.len()); + + let newh = newh.into_boxed_slice(); + // Now, we know that the sum of all the indirection nodes will be the same as the number of + // nodes. We can allocate a *single* vector with 8 * nodes entries, to be used as storage + // space, and augment our indirection vector with the starting index, resulting in a vector of + // slices. As we go, we replace each lake entry with its index in the new indirection buffer, + // allowing + for &chunk_idx_ in newh.into_iter() { + let chunk_idx = chunk_idx_ as usize; + let lake_idx_ = indirection_[chunk_idx]; + let lake_idx = lake_idx_ as usize; + let height = h[chunk_idx_ as usize]; + // For every neighbor, check to see whether it is already set; if the neighbor is set, + // its height is ≤ our height. We should search through the edge list for the + // neighbor's lake to see if there's an entry; if not, we insert, and otherwise we + // get its height. We do the same thing in our own lake's entry list. If the maximum + // of the heights we get out from this process is greater than the maximum of this + // chunk and its neighbor chunk, we switch to this new edge. + for neighbor_idx in neighbors(chunk_idx) { + let neighbor_height = h[neighbor_idx]; + let neighbor_lake_idx_ = indirection_[neighbor_idx]; + let neighbor_lake_idx = neighbor_lake_idx_ as usize; + if neighbor_lake_idx_ < lake_idx_ { + // We found an adjacent node that is not on the boundary and has already + // been processed, and also has a non-matching lake. Therefore we can use + // split_at_mut to get disjoint slices. + let (lake, neighbor_lake) = { + // println!("Okay, {:?} < {:?}", neighbor_lake_idx, lake_idx); + let (neighbor_lake, lake) = lakes.split_at_mut(lake_idx); + (lake, &mut neighbor_lake[neighbor_lake_idx..]) + }; + + // We don't actually need to know the real length here, because we've reserved + // enough spaces that we should always either find a -1 (available slot) or an + // entry for this chunk. + 'outer: for pass in lake.iter_mut() { + if pass.0 == -1 { + // println!("One time, in my mind, one time... (neighbor lake={:?} lake={:?})", neighbor_lake_idx, lake_idx_); + *pass = (chunk_idx_ as i32, neighbor_idx as u32); + // Should never run out of -1s in the neighbor lake if we didn't find + // the neighbor lake in our lake. + *neighbor_lake + .iter_mut() + .filter(|neighbor_pass| neighbor_pass.0 == -1) + .next() + .unwrap() = (neighbor_idx as i32, chunk_idx_); + // panic!("Should never happen; maybe didn't reserve enough space in lakes?") + break; + } else if indirection_[pass.1 as usize] == neighbor_lake_idx_ { + for neighbor_pass in neighbor_lake.iter_mut() { + // Should never run into -1 while looping here, since (i, j) + // and (j, i) should be added together. + if indirection_[neighbor_pass.1 as usize] == lake_idx_ { + let pass_height = h[neighbor_pass.1 as usize]; + let neighbor_pass_height = h[pass.1 as usize]; + if height.max(neighbor_height) + < pass_height.max(neighbor_pass_height) + { + *pass = (chunk_idx_ as i32, neighbor_idx as u32); + *neighbor_pass = (neighbor_idx as i32, chunk_idx_); + } + break 'outer; + } + } + // Should always find a corresponding match in the neighbor lake if + // we found the neighbor lake in our lake. 
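// The pass-selection rule applied above: a candidate pass between two adjacent lakes is a
// pair of neighbouring chunks, one in each lake; its height is the larger of the two endpoint
// heights, and between two candidate pairs we keep whichever has the smaller such height.
// A minimal sketch of that comparison over explicit index pairs:
fn better_pass(h: &[f32], a: (usize, usize), b: (usize, usize)) -> (usize, usize) {
    let height = |(i, j): (usize, usize)| h[i].max(h[j]);
    if height(a) <= height(b) {
        a
    } else {
        b
    }
}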
+ let indirection_idx = indirection[chunk_idx]; + let lake_chunk_idx = if indirection_idx >= 0 { + indirection_idx as usize + } else { + chunk_idx as usize + }; + let indirection_idx = indirection[neighbor_idx]; + let neighbor_lake_chunk_idx = if indirection_idx >= 0 { + indirection_idx as usize + } else { + neighbor_idx as usize + }; + panic!( + "For edge {:?} between lakes {:?}, couldn't find partner \ + for pass {:?}. \ + Should never happen; maybe forgot to set both edges?", + ( + (chunk_idx, uniform_idx_as_vec2(chunk_idx as usize)), + (neighbor_idx, uniform_idx_as_vec2(neighbor_idx as usize)) + ), + ( + ( + lake_chunk_idx, + uniform_idx_as_vec2(lake_chunk_idx as usize), + lake_idx_ + ), + ( + neighbor_lake_chunk_idx, + uniform_idx_as_vec2(neighbor_lake_chunk_idx as usize), + neighbor_lake_idx_ + ) + ), + ( + (pass.0, uniform_idx_as_vec2(pass.0 as usize)), + (pass.1, uniform_idx_as_vec2(pass.1 as usize)) + ), + ); + } + } + } + } + } + + // Now it's time to calculate the lake connections graph T_L covering G_L. + let mut candidates = BinaryHeap::with_capacity(indirection.len()); + // let mut pass_flows : Vec = vec![-1; indirection.len()]; + + // We start by going through each pass, deleting the ones that point out of boundary nodes and + // adding ones that point into boundary nodes from non-boundary nodes. + for edge in &mut lakes { + let edge: &mut (i32, u32) = edge; + // Only consider valid elements. + if edge.0 == -1 { + continue; + } + // Check to see if this edge points out *from* a boundary node. + // Delete it if so. + let from = edge.0 as usize; + let indirection_idx = indirection[from]; + let lake_idx = if indirection_idx < 0 { + from + } else { + indirection_idx as usize + }; + if downhill[lake_idx] == -2 { + edge.0 = -1; + continue; + } + // This edge is not pointing out from a boundary node. + // Check to see if this edge points *to* a boundary node. + // Add it to the candidate set if so. + let to = edge.1 as usize; + let indirection_idx = indirection[to]; + let lake_idx = if indirection_idx < 0 { + to + } else { + indirection_idx as usize + }; + if downhill[lake_idx] == -2 { + // Find the pass height + let pass = h[from].max(h[to]); + candidates.push(Reverse(( + NotNan::new(pass).unwrap(), + (edge.0 as u32, edge.1), + ))); + } + } + + let mut pass_flows_sorted: Vec = Vec::with_capacity(indirection.len()); + + // Now all passes pointing to the boundary are in candidates. + // As long as there are still candidates, we continue... + // NOTE: After a lake is added to the stream tree, the lake bottom's indirection entry no + // longer negates its maximum number of passes, but the lake side of the chosen pass. As such, + // we should make sure not to rely on using it this way afterwards. + // provides information about the number of candidate passes in a lake. + 'outer_final_pass: while let Some(Reverse((_, (chunk_idx, neighbor_idx)))) = candidates.pop() { + // We have the smallest candidate. + let lake_idx = indirection_[chunk_idx as usize] as usize; + let indirection_idx = indirection[chunk_idx as usize]; + let lake_chunk_idx = if indirection_idx >= 0 { + indirection_idx as usize + } else { + chunk_idx as usize + }; + if downhill[lake_chunk_idx] >= 0 { + // Candidate lake has already been connected. + continue; + } + // println!("Got here..."); + assert_eq!(downhill[lake_chunk_idx], -1); + // Candidate lake has not yet been connected, and is the lowest candidate. + // Delete all other outgoing edges. 
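// The candidate queue above is a min-heap over pass heights, built from std's max-oriented
// BinaryHeap by wrapping each entry in Reverse. The real code keys the heap by NotNan pass
// heights; plain u32 keys here keep the sketch self-contained.
use std::{cmp::Reverse, collections::BinaryHeap};

fn drain_lowest_first(passes: Vec<(u32, (u32, u32))>) -> Vec<(u32, u32)> {
    let mut heap: BinaryHeap<Reverse<(u32, (u32, u32))>> =
        passes.into_iter().map(Reverse).collect();
    let mut out = Vec::new();
    while let Some(Reverse((_height, edge))) = heap.pop() {
        out.push(edge);
    }
    out
}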
+ let max_len = -if indirection_idx < 0 { + indirection_idx + } else { + indirection[indirection_idx as usize] + } as usize; + // Add this chunk to the tree. + downhill[lake_chunk_idx] = neighbor_idx as isize; + // Also set the indirection of the lake bottom to the negation of the + // lake side of the chosen pass (chunk_idx). + // NOTE: This can't overflow i32 because WORLD_SIZE.x * WORLD_SIZE.y should fit in an i32. + indirection[lake_chunk_idx] = -(chunk_idx as i32); + // Add this edge to the sorted list. + pass_flows_sorted.push(lake_chunk_idx); + // pass_flows_sorted.push((chunk_idx as u32, neighbor_idx as u32)); + for edge in &mut lakes[lake_idx..lake_idx + max_len] { + if *edge == (chunk_idx as i32, neighbor_idx as u32) { + // Skip deleting this edge. + continue; + } + // Delete the old edge, and remember it. + let edge = mem::replace(edge, (-1, 0)); + if edge.0 == -1 { + // Don't fall off the end of the list. + break; + } + // Don't add incoming pointers from already-handled lakes or boundary nodes. + let indirection_idx = indirection[edge.1 as usize]; + let neighbor_lake_idx = if indirection_idx < 0 { + edge.1 as usize + } else { + indirection_idx as usize + }; + if downhill[neighbor_lake_idx] != -1 { + continue; + } + // Find the pass height + let pass = h[edge.0 as usize].max(h[edge.1 as usize]); + // Put the reverse edge in candidates, sorted by height, then chunk idx, and finally + // neighbor idx. + candidates.push(Reverse(( + NotNan::new(pass).unwrap(), + (edge.1, edge.0 as u32), + ))); + } + // println!("I am a pass: {:?}", (uniform_idx_as_vec2(chunk_idx as usize), uniform_idx_as_vec2(neighbor_idx as usize))); + } + log::info!("Total lakes: {:?}", pass_flows_sorted.len()); + + // Perform the bfs once again. + let mut newh = Vec::with_capacity(downhill.len()); + (&*boundary) + .iter() + .chain(pass_flows_sorted.iter()) + .for_each(|&chunk_idx| { + // Find all the nodes uphill from this lake. Since there is only one outgoing edge + // in the "downhill" graph, this is guaranteed never to visit a node more than + // once. + let start = newh.len(); + // New lake root + newh.push(chunk_idx as u32); + let mut cur = start; + while cur < newh.len() { + let node = newh[cur as usize]; + + for child in uphill(downhill, node as usize) { + // lake_idx is the index of our lake root. + // Check to make sure child (flowing into us) isn't a lake. + if indirection[child] >= 0 + /* Note: equal to chunk_idx should be same */ + { + assert!(h[child] >= h[node as usize]); + newh.push(child as u32); + } + } + cur += 1; + } + }); + assert_eq!(newh.len(), downhill.len()); + (boundary.len(), indirection, newh.into_boxed_slice()) +} + +/// Perform erosion n times. +pub fn do_erosion( + erosion_base: f32, + _max_uplift: f32, + n: usize, + seed: &RandomField, + rock_strength_nz: &(impl NoiseFn> + Sync), + oldh: impl Fn(usize) -> f32 + Sync, + is_ocean: impl Fn(usize) -> bool + Sync, + uplift: impl Fn(usize) -> f32 + Sync, +) -> Box<[f32]> { + let oldh_ = (0..WORLD_SIZE.x * WORLD_SIZE.y) + .into_par_iter() + .map(|posi| oldh(posi)) + .collect::>() + .into_boxed_slice(); + // TODO: Don't do this, maybe? + // (To elaborate, maybe we should have varying uplift or compute it some other way). 
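// The "uphill" relation walked by the BFS above (and by the earlier lake BFS): a chunk j is
// uphill of i exactly when downhill[j] == i, i.e. the uphill set is the preimage of i under
// the downhill map. A minimal sketch over an explicit neighbour list; the real `uphill`
// helper is re-exported from sim::util.
fn uphill_of(downhill: &[isize], neighbours: &[usize], i: usize) -> Vec<usize> {
    neighbours
        .iter()
        .copied()
        .filter(|&j| downhill[j] == i as isize)
        .collect()
}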
+ let uplift = (0..oldh_.len()) + .into_par_iter() + .map(|posi| uplift(posi)) + .collect::>() + .into_boxed_slice(); + let sum_uplift = uplift + .into_par_iter() + .cloned() + .map(|e| e as f64) + .sum::(); + log::info!("Sum uplifts: {:?}", sum_uplift); + + let max_uplift = uplift + .into_par_iter() + .cloned() + .max_by(|a, b| a.partial_cmp(&b).unwrap()) + .unwrap(); + log::info!("Max uplift: {:?}", max_uplift); + let mut h = oldh_; + for i in 0..n { + log::info!("Erosion iteration #{:?}", i); + erode( + &mut h, + erosion_base, + max_uplift, + seed, + rock_strength_nz, + |posi| uplift[posi], + |posi| is_ocean(posi), + ); + } + h +} diff --git a/world/src/sim/mod.rs b/world/src/sim/mod.rs index 1cf62479d3..d18281b334 100644 --- a/world/src/sim/mod.rs +++ b/world/src/sim/mod.rs @@ -1,19 +1,24 @@ +mod erosion; mod location; mod settlement; mod util; // Reexports +pub use self::erosion::{ + do_erosion, fill_sinks, get_drainage, get_lakes, get_rivers, RiverData, RiverKind, +}; pub use self::location::Location; pub use self::settlement::Settlement; -use self::util::{ - cdf_irwin_hall, uniform_idx_as_vec2, uniform_noise, vec2_as_uniform_idx, InverseCdf, +pub use self::util::{ + cdf_irwin_hall, downhill, get_oceans, local_cells, map_edge_factor, neighbors, + uniform_idx_as_vec2, uniform_noise, uphill, vec2_as_uniform_idx, InverseCdf, }; use crate::{ all::ForestKind, column::ColumnGen, generator::TownState, - util::{seed_expan, FastNoise, Sampler, StructureGen2d}, + util::{seed_expan, FastNoise, RandomField, Sampler, StructureGen2d}, CONFIG, }; use common::{ @@ -21,45 +26,45 @@ use common::{ vol::RectVolSize, }; use noise::{ - BasicMulti, Billow, HybridMulti, MultiFractal, NoiseFn, RidgedMulti, Seedable, SuperSimplex, + BasicMulti, Billow, Fbm, HybridMulti, MultiFractal, NoiseFn, RidgedMulti, Seedable, + SuperSimplex, }; +use num::{Float, Signed}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; +use rayon::prelude::*; use std::{ collections::HashMap, - f32, + f32, f64, ops::{Add, Div, Mul, Neg, Sub}, sync::Arc, }; use vek::*; +// NOTE: I suspect this is too small (1024 * 16 * 1024 * 16 * 8 doesn't fit in an i32), but we'll see +// what happens, I guess! We could always store sizes >> 3. I think 32 or 64 is the absolute +// limit though, and would require substantial changes. Also, 1024 * 16 * 1024 * 16 is no longer +// cleanly representable in f32 (that stops around 1024 * 4 * 1024 * 4, for signed floats anyway) +// but I think that is probably less important since I don't think we actually cast a chunk id to +// float, just coordinates... could be wrong though! pub const WORLD_SIZE: Vec2 = Vec2 { x: 1024, y: 1024 }; -/// Calculates the smallest distance along an axis (x, y) from an edge of -/// the world. This value is maximal at WORLD_SIZE / 2 and minimized at the extremes -/// (0 or WORLD_SIZE on one or more axes). It then divides the quantity by cell_size, -/// so the final result is 1 when we are not in a cell along the edge of the world, and -/// ranges between 0 and 1 otherwise (lower when the chunk is closer to the edge). -fn map_edge_factor(posi: usize) -> f32 { - uniform_idx_as_vec2(posi) - .map2(WORLD_SIZE.map(|e| e as i32), |e, sz| { - (sz / 2 - (e - sz / 2).abs()) as f32 / 16.0 - }) - .reduce_partial_min() - .max(0.0) - .min(1.0) -} - /// A structure that holds cached noise values and cumulative distribution functions for the input /// that led to those values. See the definition of InverseCdf for a description of how to /// interpret the types of its fields. 
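// The cached-CDF idea referenced in the doc comment above: per-chunk noise samples are
// rank-transformed so that their empirical CDF is uniform on [0, 1], which is what the
// InverseCdf fields store alongside the raw values. A minimal standalone sketch of that rank
// transform, under the assumption that this is how uniform_noise normalises its samples:
fn rank_uniformize(samples: &[f32]) -> Vec<f32> {
    let mut order: Vec<usize> = (0..samples.len()).collect();
    order.sort_by(|&a, &b| samples[a].partial_cmp(&samples[b]).unwrap());
    let denom = samples.len().saturating_sub(1).max(1) as f32;
    let mut uniform = vec![0.0; samples.len()];
    for (rank, &idx) in order.iter().enumerate() {
        uniform[idx] = rank as f32 / denom;
    }
    uniform
}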
struct GenCdf { humid_base: InverseCdf, temp_base: InverseCdf, - alt_base: InverseCdf, chaos: InverseCdf, - alt: InverseCdf, - alt_no_seawater: InverseCdf, + alt: Box<[f32]>, + water_alt: Box<[f32]>, + dh: Box<[isize]>, + /// NOTE: Until we hit 4096 × 4096, this should suffice since integers with an absolute value + /// under 2^24 can be exactly represented in an f32. + flux: Box<[f32]>, + pure_flux: InverseCdf, + alt_no_water: InverseCdf, + rivers: Box<[RiverData]>, } pub(crate) struct GenCtx { @@ -69,8 +74,6 @@ pub(crate) struct GenCtx { pub alt_nz: HybridMulti, pub hill_nz: SuperSimplex, pub temp_nz: SuperSimplex, - // Fresh groundwater (currently has no effect, but should influence humidity) - pub dry_nz: BasicMulti, // Humidity noise pub humid_nz: Billow, // Small amounts of noise for simulating rough terrain. @@ -106,7 +109,7 @@ impl WorldSim { pub fn generate(seed: u32) -> Self { let mut rng = ChaChaRng::from_seed(seed_expan::rng_state(seed)); - let mut gen_ctx = GenCtx { + let gen_ctx = GenCtx { turb_x_nz: SuperSimplex::new().set_seed(rng.gen()), turb_y_nz: SuperSimplex::new().set_seed(rng.gen()), chaos_nz: RidgedMulti::new().set_octaves(7).set_seed(rng.gen()), @@ -116,7 +119,6 @@ impl WorldSim { .set_persistence(0.1) .set_seed(rng.gen()), temp_nz: SuperSimplex::new().set_seed(rng.gen()), - dry_nz: BasicMulti::new().set_seed(rng.gen()), small_nz: BasicMulti::new().set_octaves(2).set_seed(rng.gen()), rock_nz: HybridMulti::new().set_persistence(0.3).set_seed(rng.gen()), cliff_nz: HybridMulti::new().set_persistence(0.3).set_seed(rng.gen()), @@ -145,109 +147,318 @@ impl WorldSim { town_gen: StructureGen2d::new(rng.gen(), 2048, 1024), }; - // "Base" of the chunk, to be multiplied by CONFIG.mountain_scale (multiplied value is - // from -0.25 * (CONFIG.mountain_scale * 1.1) to 0.25 * (CONFIG.mountain_scale * 0.9), - // but value here is from -0.275 to 0.225). - let alt_base = uniform_noise(|_, wposf| { - Some( - (gen_ctx.alt_nz.get((wposf.div(10_000.0)).into_array()) as f32) - .sub(0.05) - .mul(0.35), - ) - }); + let river_seed = RandomField::new(rng.gen()); + let rock_strength_nz = Fbm::new() + .set_octaves(8) + .set_persistence(0.9) + .set_seed(rng.gen()); - // chaos produces a value in [0.1, 1.24]. It is a meta-level factor intended to reflect how - // "chaotic" the region is--how much weird stuff is going on on this terrain. - let chaos = uniform_noise(|_posi, wposf| { - // From 0 to 1.6, but the distribution before the max is from -1 and 1, so there is a - // 50% chance that hill will end up at 0. - let hill = (0.0 - + gen_ctx - .hill_nz - .get((wposf.div(1_500.0)).into_array()) - .mul(1.0) as f32 - + gen_ctx - .hill_nz - .get((wposf.div(400.0)).into_array()) - .mul(0.3) as f32) - .add(0.3) - .max(0.0); + let ((alt_base, _), (chaos, _)) = rayon::join( + || { + uniform_noise(|_, wposf| { + // "Base" of the chunk, to be multiplied by CONFIG.mountain_scale (multiplied value + // is from -0.35 * (CONFIG.mountain_scale * 1.05) to + // 0.35 * (CONFIG.mountain_scale * 0.95), but value here is from -0.3675 to 0.3325). + Some( + (gen_ctx + .alt_nz + .get((wposf.div(10_000.0)).into_array()) + .min(1.0) + .max(-1.0)) + .sub(0.05) + .mul(0.35), + ) + }) + }, + || { + uniform_noise(|_, wposf| { + // From 0 to 1.6, but the distribution before the max is from -1 and 1, so there is + // a 50% chance that hill will end up at 0. 
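The alt_base and chaos fields above are now computed under rayon::join rather than one after the other. A minimal sketch of that pattern, assuming only the rayon crate (the closures are stand-ins for the actual noise sampling):

use rayon::prelude::*;

fn main() {
    let n = 1024 * 1024;
    let (alt_base, chaos) = rayon::join(
        || (0..n).into_par_iter().map(|i| (i as f32).sin()).collect::<Vec<_>>(),
        || (0..n).into_par_iter().map(|i| (i as f32).cos()).collect::<Vec<_>>(),
    );
    assert_eq!(alt_base.len(), n as usize);
    assert_eq!(chaos.len(), n as usize);
}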
+ let hill = (0.0 + + gen_ctx + .hill_nz + .get((wposf.div(1_500.0)).into_array()) + .min(1.0) + .max(-1.0) + .mul(1.0) + + gen_ctx + .hill_nz + .get((wposf.div(400.0)).into_array()) + .min(1.0) + .max(-1.0) + .mul(0.3)) + .add(0.3) + .max(0.0); - Some( - (gen_ctx.chaos_nz.get((wposf.div(3_500.0)).into_array()) as f32) - .add(1.0) - .mul(0.5) - // [0, 1] * [0.25, 1] = [0, 1] (but probably towards the lower end) - .mul( - (gen_ctx.chaos_nz.get((wposf.div(6_000.0)).into_array()) as f32) + // chaos produces a value in [0.12, 1.24]. It is a meta-level factor intended to + // reflect how "chaotic" the region is--how much weird stuff is going on on this + // terrain. + Some( + ((gen_ctx + .chaos_nz + .get((wposf.div(3_000.0)).into_array()) + .min(1.0) + .max(-1.0)) + .add(1.0) + .mul(0.5) + // [0, 1] * [0.4, 1] = [0, 1] (but probably towards the lower end) + .mul( + (gen_ctx + .chaos_nz + .get((wposf.div(6_000.0)).into_array()) + .min(1.0) + .max(-1.0)) .abs() .max(0.4) .min(1.0), + ) + // Chaos is always increased by a little when we're on a hill (but remember + // that hill is 0 about 50% of the time). + // [0, 1] + 0.15 * [0, 1.6] = [0, 1.24] + .add(0.2 * hill) + // We can't have *no* chaos! + .max(0.12)) as f32, ) - // Chaos is always increased by a little when we're on a hill (but remember that - // hill is 0 about 50% of the time). - // [0, 1] + 0.15 * [0, 1.6] = [0, 1.24] - .add(0.2 * hill) - // We can't have *no* chaos! - .max(0.12), - ) - }); + }) + }, + ); // We ignore sea level because we actually want to be relative to sea level here and want // things in CONFIG.mountain_scale units, but otherwise this is a correct altitude // calculation. Note that this is using the "unadjusted" temperature. - let alt = uniform_noise(|posi, wposf| { - // This is the extension upwards from the base added to some extra noise from -1 to 1. - // The extra noise is multiplied by alt_main (the mountain part of the extension) - // clamped to [0.25, 1], and made 60% larger (so the extra noise is between [-1.6, 1.6], - // and the final noise is never more than 160% or less than 40% of the original noise, - // depending on altitude). - // Adding this to alt_main thus yields a value between -0.4 (if alt_main = 0 and - // gen_ctx = -1) and 2.6 (if alt_main = 1 and gen_ctx = 1). When the generated small_nz - // value hits -0.625 the value crosses 0, so most of the points are above 0. + let (alt_old, alt_old_inverse) = uniform_noise(|posi, wposf| { + // This is the extension upwards from the base added to some extra noise from -1 to + // 1. // - // Then, we add 1 and divide by 2 to get a value between 0.3 and 1.8. + // The extra noise is multiplied by alt_main (the mountain part of the extension) + // powered to 0.8 and clamped to [0.15, 1], to get a value between [-1, 1] again. + // + // The sides then receive the sequence (y * 0.3 + 1.0) * 0.4, so we have + // [-1*1*(1*0.3+1)*0.4, 1*(1*0.3+1)*0.4] = [-0.52, 0.52]. + // + // Adding this to alt_main thus yields a value between -0.4 (if alt_main = 0 and + // gen_ctx = -1, 0+-1*(0*.3+1)*0.4) and 1.52 (if alt_main = 1 and gen_ctx = 1). + // Most of the points are above 0. + // + // Next, we add again by a sin of alt_main (between [-1, 1])^pow, getting + // us (after adjusting for sign) another value between [-1, 1], and then this is + // multiplied by 0.045 to get [-0.045, 0.045], which is added to [-0.4, 0.52] to get + // [-0.445, 0.565]. let alt_main = { // Extension upwards from the base. A positive number from 0 to 1 curved to be // maximal at 0. 
Also to be multiplied by CONFIG.mountain_scale. - let alt_main = (gen_ctx.alt_nz.get((wposf.div(2_000.0)).into_array()) as f32) - .abs() - .powf(1.35); + let alt_main = (gen_ctx + .alt_nz + .get((wposf.div(2_000.0)).into_array()) + .min(1.0) + .max(-1.0)) + .abs() + .powf(1.35); - fn spring(x: f32, pow: f32) -> f32 { + fn spring(x: f64, pow: f64) -> f64 { x.abs().powf(pow) * x.signum() } (0.0 + alt_main - + (gen_ctx.small_nz.get((wposf.div(300.0)).into_array()) as f32) - .mul(alt_main.powf(0.8).max(0.15)) - .mul(0.3) - .add(1.0) - .mul(0.4) + + (gen_ctx + .small_nz + .get((wposf.div(300.0)).into_array()) + .min(1.0) + .max(-1.0)) + .mul(alt_main.powf(0.8).max(/*0.25*/ 0.15)) + .mul(0.3) + .add(1.0) + .mul(0.4) + spring(alt_main.abs().powf(0.5).min(0.75).mul(60.0).sin(), 4.0).mul(0.045)) }; // Now we can compute the final altitude using chaos. - // We multiply by chaos clamped to [0.1, 1.24] to get a value between 0.03 and 2.232 for - // alt_pre, then multiply by CONFIG.mountain_scale and add to the base and sea level to - // get an adjusted value, then multiply the whole thing by map_edge_factor + // We multiply by chaos clamped to [0.1, 1.24] to get a value between [0.03, 2.232] + // for alt_pre, then multiply by CONFIG.mountain_scale and add to the base and sea + // level to get an adjusted value, then multiply the whole thing by map_edge_factor // (TODO: compute final bounds). + // + // [-.3675, .3325] + [-0.445, 0.565] * [0.12, 1.24]^1.2 + // ~ [-.3675, .3325] + [-0.445, 0.565] * [_, 1.30] + // = [-.3675, .3325] + ([-0.5785, 0.7345]) + // = [-0.946, 1.067] Some( - (alt_base[posi].1 + alt_main.mul(chaos[posi].1.powf(1.2))) - .mul(map_edge_factor(posi)), + ((alt_base[posi].1 + alt_main.mul((chaos[posi].1 as f64).powf(1.2))) + .mul(map_edge_factor(posi) as f64) + .add( + (CONFIG.sea_level as f64) + .div(CONFIG.mountain_scale as f64) + .mul(map_edge_factor(posi) as f64), + ) + .sub((CONFIG.sea_level as f64).div(CONFIG.mountain_scale as f64))) + as f32, ) }); + // Find the minimum and maximum original altitudes. + // NOTE: Will panic if there is no land, and will not work properly if the minimum and + // maximum land altitude are identical (will most likely panic later). + let (alt_old_min_index, _alt_old_min) = alt_old_inverse + .iter() + .copied() + .find(|&(_, h)| h > 0.0) + .unwrap(); + let &(alt_old_max_index, _alt_old_max) = alt_old_inverse.last().unwrap(); + let alt_old_min_uniform = alt_old[alt_old_min_index].0; + let alt_old_max_uniform = alt_old[alt_old_max_index].0; + + // Calculate oceans. + let old_height = |posi: usize| alt_old[posi].1; + let old_height_uniform = |posi: usize| alt_old[posi].0; + let is_ocean = get_oceans(old_height); + let is_ocean_fn = |posi: usize| is_ocean[posi]; + + // Perform some erosion. + let max_erosion_per_delta_t = 32.0 / CONFIG.mountain_scale as f64; + + // Logistic regression. Make sure x ∈ (0, 1). 
+ let logit = |x: f64| x.ln() - (-x).ln_1p(); + // 0.5 + 0.5 * tanh(ln(1 / (1 - 0.1) - 1) / (2 * (sqrt(3)/pi))) + let logistic_2_base = 3.0f64.sqrt() * f64::consts::FRAC_2_PI; + // Assumes μ = 0, σ = 1 + let logistic_cdf = |x: f64| (x / logistic_2_base).tanh() * 0.5 + 0.5; + + let erosion_pow = 2.0; + let n_steps = 100; + let erosion_factor = |x: f64| logistic_cdf(erosion_pow * logit(x)); + let alt = do_erosion( + 0.0, + max_erosion_per_delta_t as f32, + n_steps, + &river_seed, + &rock_strength_nz, + |posi| { + if is_ocean_fn(posi) { + old_height(posi) + } else { + 5.0 / CONFIG.mountain_scale + } + }, + is_ocean_fn, + |posi| { + let height = ((old_height_uniform(posi) - alt_old_min_uniform) as f64 + / (alt_old_max_uniform - alt_old_min_uniform) as f64) + .max(1e-7 / CONFIG.mountain_scale as f64) + .min(1.0f64 - 1e-7); + let height = erosion_factor(height); + let height = height + .mul(max_erosion_per_delta_t * 7.0 / 8.0) + .add(max_erosion_per_delta_t / 8.0); + height as f32 + }, + ); + let is_ocean = get_oceans(|posi| alt[posi]); + let is_ocean_fn = |posi: usize| is_ocean[posi]; + let mut dh = downhill(&alt, /*old_height*/ is_ocean_fn); + let (boundary_len, indirection, water_alt_pos) = get_lakes(&/*water_alt*/alt, &mut dh); + let flux_old = get_drainage(&water_alt_pos, &dh, boundary_len); + + let water_height_initial = |chunk_idx| { + let indirection_idx = indirection[chunk_idx]; + // Find the lake this point is flowing into. + let lake_idx = if indirection_idx < 0 { + chunk_idx + } else { + indirection_idx as usize + }; + // Find the pass this lake is flowing into (i.e. water at the lake bottom gets + // pushed towards the point identified by pass_idx). + let neighbor_pass_idx = dh[lake_idx]; + let chunk_water_alt = if neighbor_pass_idx < 0 { + // This is either a boundary node (dh[chunk_idx] == -2, i.e. water is at sea level) + // or part of a lake that flows directly into the ocean. In the former case, water + // is at sea level so we just return 0.0. In the latter case, the lake bottom must + // have been a boundary node in the first place--meaning this node flows directly + // into the ocean. In that case, its lake bottom is ocean, meaning its water is + // also at sea level. Thus, we return 0.0 in both cases. + 0.0 + } else { + // This chunk is draining into a body of water that isn't the ocean (i.e., a lake). + // Then we just need to find the pass height of the surrounding lake in order to + // figure out the initial water height (which fill_sinks will then extend to make + // sure it fills the entire basin). + + // Find the height of the pass into which our lake is flowing. + let pass_height_j = alt[neighbor_pass_idx as usize]; + // Find the height of "our" side of the pass (the part of it that drains into this + // chunk's lake). + let pass_idx = -indirection[lake_idx]; + let pass_height_i = alt[pass_idx as usize]; + // Find the maximum of these two heights. + let pass_height = pass_height_i.max(pass_height_j); + // Use the pass height as the initial water altitude. + pass_height + }; + // Use the maximum of the pass height and chunk height as the parameter to fill_sinks. + let chunk_alt = alt[chunk_idx]; + chunk_alt.max(chunk_water_alt) + }; + + let water_alt = fill_sinks(water_height_initial, is_ocean_fn); + let rivers = get_rivers(&water_alt_pos, &water_alt, &dh, &indirection, &flux_old); + + let water_alt = indirection + .par_iter() + .enumerate() + .map(|(chunk_idx, &indirection_idx)| { + // Find the lake this point is flowing into. 
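The logit/logistic_cdf/erosion_factor closures defined above reshape the uniform altitude rank before it becomes uplift. The same math as standalone free functions (names are only for the sketch), with assertions for the properties relied on: 0.5 is a fixed point, the curve is symmetric and strictly increasing, it stays in (0, 1), and mid-high ranks get pushed toward the extremes:

fn logit(x: f64) -> f64 {
    x.ln() - (-x).ln_1p() // ln(x) - ln(1 - x), valid for x in (0, 1)
}

fn logistic_cdf(x: f64) -> f64 {
    // CDF of a logistic distribution with mean 0 and standard deviation 1.
    let logistic_2_base = 3.0f64.sqrt() * std::f64::consts::FRAC_2_PI;
    (x / logistic_2_base).tanh() * 0.5 + 0.5
}

fn erosion_factor(x: f64) -> f64 {
    let erosion_pow = 2.0;
    logistic_cdf(erosion_pow * logit(x))
}

fn main() {
    assert!((erosion_factor(0.5) - 0.5).abs() < 1e-12);
    // Mid-high ranks are pushed up (and, by symmetry, mid-low ranks down).
    assert!(erosion_factor(0.75) > 0.75);
    for i in 1..100 {
        let x = i as f64 / 100.0;
        // Symmetric around 0.5 and strictly increasing.
        assert!((erosion_factor(x) + erosion_factor(1.0 - x) - 1.0).abs() < 1e-9);
        assert!(erosion_factor(x) < erosion_factor(x + 1e-3));
        assert!(erosion_factor(x) > 0.0 && erosion_factor(x) < 1.0);
    }
}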
+ let lake_idx = if indirection_idx < 0 { + chunk_idx + } else { + indirection_idx as usize + }; + // Find the pass this lake is flowing into (i.e. water at the lake bottom gets + // pushed towards the point identified by pass_idx). + let neighbor_pass_idx = dh[lake_idx]; + if neighbor_pass_idx < 0 { + // This is either a boundary node (dh[chunk_idx] == -2, i.e. water is at sea level) + // or part of a lake that flows directly into the ocean. In the former case, water + // is at sea level so we just return 0.0. In the latter case, the lake bottom must + // have been a boundary node in the first place--meaning this node flows directly + // into the ocean. In that case, its lake bottom is ocean, meaning its water is + // also at sea level. Thus, we return 0.0 in both cases. + 0.0 + } else { + // This is not flowing into the ocean, so we can use the existing water_alt. + water_alt[chunk_idx] + } + }) + .collect::>() + .into_boxed_slice(); + + let is_underwater = |chunk_idx: usize| match rivers[chunk_idx].river_kind { + Some(RiverKind::Ocean) | Some(RiverKind::Lake { .. }) => true, + Some(RiverKind::River { .. }) => false, // TODO: inspect width + None => false, + }; + // Check whether any tiles around this tile are not water (since Lerp will ensure that they // are included). - let pure_water = |posi| { + let pure_water = |posi: usize| { + /* let river_data = &rivers[posi]; + match river_data.river_kind { + Some(RiverKind::Lake { .. }) => { + // Lakes are always completely submerged. + return true; + }, + /* Some(RiverKind::River { cross_section }) if cross_section.x >= TerrainChunkSize::RECT_SIZE.x as f32 => { + // Rivers that are wide enough are considered completely submerged (not a + // completely fair approximation). + return true; + }, */ + _ => {} + } */ let pos = uniform_idx_as_vec2(posi); for x in pos.x - 1..=pos.x + 1 { for y in pos.y - 1..=pos.y + 1 { if x >= 0 && y >= 0 && x < WORLD_SIZE.x as i32 && y < WORLD_SIZE.y as i32 { let posi = vec2_as_uniform_idx(Vec2::new(x, y)); - if alt[posi].1.mul(CONFIG.mountain_scale) > -8.0 { - // Account for warping in later stages + if !is_underwater(posi) { return false; } } @@ -256,51 +467,74 @@ impl WorldSim { true }; - // A version of alt that is uniform over *non-seawater* (or land-adjacent seawater) chunks. - let alt_no_seawater = uniform_noise(|posi, _wposf| { + let (pure_flux, _) = uniform_noise(|posi, _| { if pure_water(posi) { None } else { - Some(alt[posi].1) + Some(flux_old[posi]) } }); - // -1 to 1. - let temp_base = uniform_noise(|posi, wposf| { - if pure_water(posi) { - None - } else { - Some(gen_ctx.temp_nz.get((wposf.div(12000.0)).into_array()) as f32) - } - }); - - // 0 to 1, hopefully. - let humid_base = uniform_noise(|posi, wposf| { - // Check whether any tiles around this tile are water. - if pure_water(posi) { - None - } else { - Some( - (gen_ctx.humid_nz.get(wposf.div(1024.0).into_array()) as f32) - .add(1.0) - .mul(0.5), + let ((alt_no_water, _), ((temp_base, _), (humid_base, _))) = rayon::join( + || { + uniform_noise(|posi, _| { + if pure_water(posi) { + None + } else { + // A version of alt that is uniform over *non-water* (or land-adjacent water) + // chunks. + Some(alt[posi]) + } + }) + }, + || { + rayon::join( + || { + uniform_noise(|posi, wposf| { + if pure_water(posi) { + None + } else { + // -1 to 1. + Some(gen_ctx.temp_nz.get((wposf.div(12000.0)).into_array()) as f32) + } + }) + }, + || { + uniform_noise(|posi, wposf| { + // Check whether any tiles around this tile are water. 
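pure_water above rejects a chunk if anything in its 3×3 neighbourhood is dry, so the later uniform-noise passes over "water" chunks never mix in land values. The same test in a minimal standalone form (grid size, indexing, and the is_underwater closure are stand-ins for the patch's versions):

fn pure_water(pos: (i32, i32), size: (i32, i32), is_underwater: impl Fn(usize) -> bool) -> bool {
    for x in pos.0 - 1..=pos.0 + 1 {
        for y in pos.1 - 1..=pos.1 + 1 {
            if x >= 0 && y >= 0 && x < size.0 && y < size.1 {
                let idx = (y * size.0 + x) as usize;
                if !is_underwater(idx) {
                    return false;
                }
            }
        }
    }
    true
}

fn main() {
    // 4x4 grid with a single dry cell at (0, 0).
    let dry = (0i32, 0i32);
    let wet = |idx: usize| {
        let (x, y) = (idx as i32 % 4, idx as i32 / 4);
        (x, y) != dry
    };
    assert!(!pure_water((1, 1), (4, 4), wet)); // adjacent to the dry cell
    assert!(pure_water((2, 2), (4, 4), wet));  // fully surrounded by water
}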
+ if pure_water(posi) { + None + } else { + // 0 to 1, hopefully. + Some( + (gen_ctx.humid_nz.get(wposf.div(1024.0).into_array()) as f32) + .add(1.0) + .mul(0.5), + ) + } + }) + }, ) - } - }); + }, + ); let gen_cdf = GenCdf { humid_base, temp_base, - alt_base, chaos, alt, - alt_no_seawater, + water_alt, + dh, + flux: flux_old, + pure_flux, + alt_no_water, + rivers, }; - let mut chunks = Vec::new(); - for i in 0..WORLD_SIZE.x * WORLD_SIZE.y { - chunks.push(SimChunk::generate(i, &mut gen_ctx, &gen_cdf)); - } + let chunks = (0..WORLD_SIZE.x * WORLD_SIZE.y) + .into_par_iter() + .map(|i| SimChunk::generate(i, &gen_ctx, &gen_cdf)) + .collect::>(); let mut this = Self { seed: seed, @@ -315,6 +549,43 @@ impl WorldSim { this } + /// Draw a map of the world based on chunk information. Returns a buffer of u32s. + pub fn get_map(&self) -> Vec { + (0..WORLD_SIZE.x * WORLD_SIZE.y) + .into_par_iter() + .map(|chunk_idx| { + let pos = uniform_idx_as_vec2(chunk_idx); + + let (alt, water_alt, river_kind) = self + .get(pos) + .map(|sample| (sample.alt, sample.water_alt, sample.river.river_kind)) + .unwrap_or((CONFIG.sea_level, CONFIG.sea_level, None)); + let alt = ((alt - CONFIG.sea_level) / CONFIG.mountain_scale) + .min(1.0) + .max(0.0); + let water_alt = ((alt.max(water_alt) - CONFIG.sea_level) / CONFIG.mountain_scale) + .min(1.0) + .max(0.0); + match river_kind { + Some(RiverKind::Ocean) => u32::from_le_bytes([64, 32, 0, 255]), + Some(RiverKind::Lake { .. }) => u32::from_le_bytes([ + 64 + (water_alt * 191.0) as u8, + 32 + (water_alt * 95.0) as u8, + 0, + 255, + ]), + Some(RiverKind::River { .. }) => u32::from_le_bytes([ + 64 + (alt * 191.0) as u8, + 32 + (alt * 95.0) as u8, + 0, + 255, + ]), + None => u32::from_le_bytes([0, (alt * 255.0) as u8, 0, 255]), + } + }) + .collect() + } + /// Prepare the world for simulation pub fn seed_elements(&mut self) { let mut rng = self.rng.clone(); @@ -349,9 +620,9 @@ impl WorldSim { .enumerate() .collect::>(); for i in 0..locations.len() { - let pos = locations[i].center; + let pos = locations[i].center.map(|e| e as i64); - loc_clone.sort_by_key(|(_, l)| l.distance_squared(pos)); + loc_clone.sort_by_key(|(_, l)| l.map(|e| e as i64).distance_squared(pos)); loc_clone.iter().skip(1).take(2).for_each(|(j, _)| { locations[i].neighbours.insert(*j); @@ -367,11 +638,13 @@ impl WorldSim { if loc_grid[j * grid_size.x + i].is_none() { const R_COORDS: [i32; 5] = [-1, 0, 1, 0, -1]; let idx = self.rng.gen::() % 4; - let loc = Vec2::new(i as i32 + R_COORDS[idx], j as i32 + R_COORDS[idx + 1]) - .map(|e| e as usize); - - loc_grid[j * grid_size.x + i] = - loc_grid.get(loc.y * grid_size.x + loc.x).cloned().flatten(); + let new_i = i as i32 + R_COORDS[idx]; + let new_j = j as i32 + R_COORDS[idx + 1]; + if new_i >= 0 && new_j >= 0 { + let loc = Vec2::new(new_i as usize, new_j as usize); + loc_grid[j * grid_size.x + i] = + loc_grid.get(loc.y * grid_size.x + loc.x).cloned().flatten(); + } } } } @@ -404,26 +677,32 @@ impl WorldSim { // Sort regions based on distance near.sort_by(|a, b| a.dist.partial_cmp(&b.dist).unwrap()); - let nearest_cell_pos = near[0].chunk_pos.map(|e| e as usize) / cell_size; - self.get_mut(chunk_pos).unwrap().location = loc_grid - .get(nearest_cell_pos.y * grid_size.x + nearest_cell_pos.x) - .cloned() - .unwrap_or(None) - .map(|loc_idx| LocationInfo { loc_idx, near }); + let nearest_cell_pos = near[0].chunk_pos; + if nearest_cell_pos.x >= 0 && nearest_cell_pos.y >= 0 { + let nearest_cell_pos = nearest_cell_pos.map(|e| e as usize) / cell_size; + 
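get_map above packs each chunk into one little-endian RGBA u32, with the scaled altitude driving the green channel for plain land. A small sketch of that land branch (the sea_level/mountain_scale numbers below are placeholder example values, not CONFIG's):

fn land_pixel(alt: f32, sea_level: f32, mountain_scale: f32) -> u32 {
    let a = ((alt - sea_level) / mountain_scale).min(1.0).max(0.0);
    u32::from_le_bytes([0, (a * 255.0) as u8, 0, 255])
}

fn main() {
    let px = land_pixel(600.0, 140.0, 1400.0); // roughly a third of the way up the height range
    let [r, g, b, a] = px.to_le_bytes();
    assert_eq!((r, b, a), (0, 0, 255));
    assert_eq!(g, ((600.0f32 - 140.0) / 1400.0 * 255.0) as u8); // 83
    println!("{:08x}", px);
}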
self.get_mut(chunk_pos).unwrap().location = loc_grid + .get(nearest_cell_pos.y * grid_size.x + nearest_cell_pos.x) + .cloned() + .unwrap_or(None) + .map(|loc_idx| LocationInfo { loc_idx, near }); - let town_size = 200; - let in_town = self - .get(chunk_pos) - .unwrap() - .location - .as_ref() - .map(|l| { - locations[l.loc_idx].center.distance_squared(block_pos) - < town_size * town_size - }) - .unwrap_or(false); - if in_town { - self.get_mut(chunk_pos).unwrap().spawn_rate = 0.0; + let town_size = 200; + let in_town = self + .get(chunk_pos) + .unwrap() + .location + .as_ref() + .map(|l| { + locations[l.loc_idx] + .center + .map(|e| e as i64) + .distance_squared(block_pos.map(|e| e as i64)) + < town_size * town_size + }) + .unwrap_or(false); + if in_town { + self.get_mut(chunk_pos).unwrap().spawn_rate = 0.0; + } } } } @@ -446,6 +725,7 @@ impl WorldSim { let maybe_town = maybe_towns .entry(*pos) .or_insert_with(|| { + // println!("Town: {:?}", town); TownState::generate(*pos, &mut ColumnGen::new(self), &mut rng) .map(|t| Arc::new(t)) }) @@ -456,7 +736,6 @@ impl WorldSim { < town.radius().add(64).pow(2) }) .cloned(); - self.get_mut(chunk_pos).unwrap().structures.town = maybe_town; } } @@ -497,17 +776,27 @@ impl WorldSim { } pub fn get_base_z(&self, chunk_pos: Vec2) -> Option { - self.get(chunk_pos).and_then(|_| { - (0..2) - .map(|i| (0..2).map(move |j| (i, j))) - .flatten() - .map(|(i, j)| { - self.get(chunk_pos + Vec2::new(i, j)) - .map(|c| c.get_base_z()) - }) - .flatten() - .fold(None, |a: Option, x| a.map(|a| a.min(x)).or(Some(x))) - }) + if !chunk_pos + .map2(WORLD_SIZE, |e, sz| e > 0 && e < sz as i32 - 2) + .reduce_and() + { + return None; + } + + let chunk_idx = vec2_as_uniform_idx(chunk_pos); + local_cells(chunk_idx) + .flat_map(|neighbor_idx| { + let neighbor_pos = uniform_idx_as_vec2(neighbor_idx); + let neighbor_chunk = self.get(neighbor_pos); + let river_kind = neighbor_chunk.and_then(|c| c.river.river_kind); + let has_water = river_kind.is_some() && river_kind != Some(RiverKind::Ocean); + if (neighbor_pos - chunk_pos).reduce_partial_max() <= 1 || has_water { + neighbor_chunk.map(|c| c.get_base_z()) + } else { + None + } + }) + .fold(None, |a: Option, x| a.map(|a| a.min(x)).or(Some(x))) } pub fn get_interpolated(&self, pos: Vec2, mut f: F) -> Option @@ -544,12 +833,133 @@ impl WorldSim { Some(cubic(x[0], x[1], x[2], x[3], pos.x.fract() as f32)) } + + /// M. Steffen splines. + /// + /// A more expensive cubic interpolation function that can preserve monotonicity between + /// points. This is useful if you rely on relative differences between endpoints being + /// preserved at all interior points. For example, we use this with riverbeds (and water + /// height on along rivers) to maintain the invariant that the rivers always flow downhill at + /// interior points (not just endpoints), without needing to flatten out the river. + pub fn get_interpolated_monotone(&self, pos: Vec2, mut f: F) -> Option + where + T: Copy + Default + Signed + Float + Add + Mul, + F: FnMut(&SimChunk) -> T, + { + // See http://articles.adsabs.harvard.edu/cgi-bin/nph-iarticle_query?1990A%26A...239..443S&defaultprint=YES&page_ind=0&filetype=.pdf + // + // Note that these are only guaranteed monotone in one dimension; fortunately, that is + // sufficient for our purposes. 
+ let pos = pos.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| { + e as f64 / sz as f64 + }); + + let secant = |b: T, c: T| c - b; + + let parabola = |a: T, c: T| -a * 0.5 + c * 0.5; + + let slope = |_a: T, _b: T, _c: T, s_a: T, s_b: T, p_b: T| { + // ((b - a).signum() + (c - b).signum()) * s + (s_a.signum() + s_b.signum()) * (s_a.abs().min(s_b.abs()).min(p_b.abs() * 0.5)) + }; + + let cubic = |a: T, b: T, c: T, d: T, x: f32| -> T { + // Compute secants. + let s_a = secant(a, b); + let s_b = secant(b, c); + let s_c = secant(c, d); + // Computing slopes from parabolas. + let p_b = parabola(a, c); + let p_c = parabola(b, d); + // Get slopes (setting distance between neighbors to 1.0). + let slope_b = slope(a, b, c, s_a, s_b, p_b); + let slope_c = slope(b, c, d, s_b, s_c, p_c); + let x2 = x * x; + + // Interpolating splines. + let co0 = slope_b + slope_c - s_b * 2.0; + // = a * -0.5 + c * 0.5 + b * -0.5 + d * 0.5 - 2 * (c - b) + // = a * -0.5 + b * 1.5 - c * 1.5 + d * 0.5; + let co1 = s_b * 3.0 - slope_b * 2.0 - slope_c; + // = (3.0 * (c - b) - 2.0 * (a * -0.5 + c * 0.5) - (b * -0.5 + d * 0.5)) + // = a + b * -2.5 + c * 2.0 + d * -0.5; + let co2 = slope_b; + // = a * -0.5 + c * 0.5; + let co3 = b; + + co0 * x2 * x + co1 * x2 + co2 * x + co3 + }; + + let mut x = [T::default(); 4]; + + for (x_idx, j) in (-1..3).enumerate() { + let y0 = f(self.get(pos.map2(Vec2::new(j, -1), |e, q| e.max(0.0) as i32 + q))?); + let y1 = f(self.get(pos.map2(Vec2::new(j, 0), |e, q| e.max(0.0) as i32 + q))?); + let y2 = f(self.get(pos.map2(Vec2::new(j, 1), |e, q| e.max(0.0) as i32 + q))?); + let y3 = f(self.get(pos.map2(Vec2::new(j, 2), |e, q| e.max(0.0) as i32 + q))?); + + x[x_idx] = cubic(y0, y1, y2, y3, pos.y.fract() as f32); + } + + Some(cubic(x[0], x[1], x[2], x[3], pos.x.fract() as f32)) + } + + /// Bilinear interpolation. + /// + /// Linear interpolation in both directions (i.e. quadratic interpolation). + pub fn get_interpolated_bilinear(&self, pos: Vec2, mut f: F) -> Option + where + T: Copy + Default + Signed + Float + Add + Mul, + F: FnMut(&SimChunk) -> T, + { + // (i) Find downhill for all four points. + // (ii) Compute distance from each downhill point and do linear interpolation on their heights. + // (iii) Compute distance between each neighboring point and do linear interpolation on + // their distance-interpolated heights. + + // See http://articles.adsabs.harvard.edu/cgi-bin/nph-iarticle_query?1990A%26A...239..443S&defaultprint=YES&page_ind=0&filetype=.pdf + // + // Note that these are only guaranteed monotone in one dimension; fortunately, that is + // sufficient for our purposes. + let pos = pos.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| { + e as f64 / sz as f64 + }); + + // Orient the chunk in the direction of the most downhill point of the four. If there is + // no "most downhill" point, then we don't care. 
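The secant/parabola/slope/cubic steps above are Steffen's (1990) monotone cubic with unit spacing between samples. A standalone f64 version (free function, made-up sample profile) plus a check of the property the rivers depend on: on each interval the interpolant stays between its two endpoints, so a descending riverbed never bounces back uphill between samples:

fn steffen(a: f64, b: f64, c: f64, d: f64, x: f64) -> f64 {
    let secant = |y0: f64, y1: f64| y1 - y0;
    let parabola = |y0: f64, y2: f64| (y2 - y0) * 0.5;
    // Steffen's slope limiter: zero at local extrema, otherwise bounded by the
    // smaller neighbouring secant and half the parabolic slope.
    let slope = |s0: f64, s1: f64, p: f64| {
        (s0.signum() + s1.signum()) * s0.abs().min(s1.abs()).min(p.abs() * 0.5)
    };
    let (s_a, s_b, s_c) = (secant(a, b), secant(b, c), secant(c, d));
    let (p_b, p_c) = (parabola(a, c), parabola(b, d));
    let (slope_b, slope_c) = (slope(s_a, s_b, p_b), slope(s_b, s_c, p_c));
    let x2 = x * x;
    let co0 = slope_b + slope_c - s_b * 2.0;
    let co1 = s_b * 3.0 - slope_b * 2.0 - slope_c;
    (co0 * x2 * x) + (co1 * x2) + (slope_b * x) + b
}

fn main() {
    // A riverbed-like strictly descending profile.
    let y = [10.0, 9.0, 5.0, 4.9, 0.0, -3.0];
    for w in y.windows(4) {
        let (a, b, c, d) = (w[0], w[1], w[2], w[3]);
        for i in 0..=100 {
            let t = i as f64 / 100.0;
            let v = steffen(a, b, c, d, t);
            // Never overshoots the endpoints of the middle interval [b, c].
            assert!(v <= b + 1e-9 && v >= c - 1e-9);
        }
    }
}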
+ let x0 = pos.map2(Vec2::new(0, 0), |e, q| e.max(0.0) as i32 + q); + let p0 = self.get(x0)?; + let y0 = f(p0); + + let x1 = pos.map2(Vec2::new(1, 0), |e, q| e.max(0.0) as i32 + q); + let p1 = self.get(x1)?; + let y1 = f(p1); + + let x2 = pos.map2(Vec2::new(0, 1), |e, q| e.max(0.0) as i32 + q); + let p2 = self.get(x2)?; + let y2 = f(p2); + + let x3 = pos.map2(Vec2::new(1, 1), |e, q| e.max(0.0) as i32 + q); + let p3 = self.get(x3)?; + let y3 = f(p3); + + let z0 = y0 + .mul(1.0 - pos.x.fract() as f32) + .mul(1.0 - pos.y.fract() as f32); + let z1 = y1.mul(pos.x.fract() as f32).mul(1.0 - pos.y.fract() as f32); + let z2 = y2.mul(1.0 - pos.x.fract() as f32).mul(pos.y.fract() as f32); + let z3 = y3.mul(pos.x.fract() as f32).mul(pos.y.fract() as f32); + + Some(z0 + z1 + z2 + z3) + } } pub struct SimChunk { pub chaos: f32, - pub alt_base: f32, pub alt: f32, + pub water_alt: f32, + pub downhill: Option>, + pub flux: f32, pub temp: f32, pub humidity: f32, pub rockiness: f32, @@ -559,6 +969,7 @@ pub struct SimChunk { pub forest_kind: ForestKind, pub spawn_rate: f32, pub location: Option, + pub river: RiverData, pub structures: Structures, } @@ -583,56 +994,124 @@ pub struct Structures { } impl SimChunk { - fn generate(posi: usize, gen_ctx: &mut GenCtx, gen_cdf: &GenCdf) -> Self { + fn generate(posi: usize, gen_ctx: &GenCtx, gen_cdf: &GenCdf) -> Self { let pos = uniform_idx_as_vec2(posi); let wposf = (pos * TerrainChunkSize::RECT_SIZE.map(|e| e as i32)).map(|e| e as f64); - let (_, alt_base) = gen_cdf.alt_base[posi]; - let map_edge_factor = map_edge_factor(posi); + let _map_edge_factor = map_edge_factor(posi); let (_, chaos) = gen_cdf.chaos[posi]; let (humid_uniform, _) = gen_cdf.humid_base[posi]; - let (_, alt_pre) = gen_cdf.alt[posi]; - let (alt_uniform, _) = gen_cdf.alt_no_seawater[posi]; + let alt_pre = gen_cdf.alt[posi]; + let water_alt_pre = gen_cdf.water_alt[posi]; + let downhill_pre = gen_cdf.dh[posi]; + let (flux_uniform, _) = gen_cdf.pure_flux[posi]; + let flux = gen_cdf.flux[posi]; + let (alt_uniform, _) = gen_cdf.alt_no_water[posi]; let (temp_uniform, _) = gen_cdf.temp_base[posi]; + let river = gen_cdf.rivers[posi].clone(); + + /* // Vertical difference from the equator (NOTE: "uniform" with much lower granularity than + // other uniform quantities, but hopefully this doesn't matter *too* much--if it does, we + // can always add a small x component). + // + // Not clear that we want this yet, let's see. + let latitude_uniform = (pos.y as f32 / WORLD_SIZE.y as f32).sub(0.5).mul(2.0); + + // Even less granular--if this matters we can make the sign affect the quantiy slightly. + let abs_lat_uniform = latitude_uniform.abs(); */ // Take the weighted average of our randomly generated base humidity, the scaled - // negative altitude, and other random variable (to add some noise) to yield the - // final humidity. Note that we are using the "old" version of chaos here. - const HUMID_WEIGHTS: [f32; 2] = [1.0, 1.0]; - let humidity = cdf_irwin_hall(&HUMID_WEIGHTS, [humid_uniform, 1.0 - alt_uniform]); + // negative altitude, and the calculated water flux over this point in order to compute + // humidity. + const HUMID_WEIGHTS: [f32; 3] = [4.0, 1.0, 1.0]; + let humidity = cdf_irwin_hall( + &HUMID_WEIGHTS, + [humid_uniform, flux_uniform, 1.0 - alt_uniform], + ); - // We also correlate temperature negatively with altitude using different weighting than we - // use for humidity. 
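The z0..z3 weights above are plain bilinear interpolation: each corner sample is weighted by how close the query point is to that corner, and the four weights always sum to 1. A tiny standalone check:

fn bilerp(y00: f32, y10: f32, y01: f32, y11: f32, fx: f32, fy: f32) -> f32 {
    y00 * (1.0 - fx) * (1.0 - fy)
        + y10 * fx * (1.0 - fy)
        + y01 * (1.0 - fx) * fy
        + y11 * fx * fy
}

fn main() {
    // Reproduces the corners exactly...
    assert_eq!(bilerp(1.0, 2.0, 3.0, 4.0, 0.0, 0.0), 1.0);
    assert_eq!(bilerp(1.0, 2.0, 3.0, 4.0, 1.0, 1.0), 4.0);
    // ...and averages all four samples at the centre of the cell.
    assert_eq!(bilerp(1.0, 2.0, 3.0, 4.0, 0.5, 0.5), 2.5);
}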
- const TEMP_WEIGHTS: [f32; 2] = [2.0, 1.0]; - let temp = cdf_irwin_hall(&TEMP_WEIGHTS, [temp_uniform, 1.0 - alt_uniform]) - // Convert to [-1, 1] - .sub(0.5) - .mul(2.0); + // We also correlate temperature negatively with altitude and absolute latitude, using + // different weighting than we use for humidity. + const TEMP_WEIGHTS: [f32; 2] = [2.0, 1.0 /*, 1.0*/]; + let temp = cdf_irwin_hall( + &TEMP_WEIGHTS, + [ + temp_uniform, + 1.0 - alt_uniform, /* 1.0 - abs_lat_uniform*/ + ], + ) + // Convert to [-1, 1] + .sub(0.5) + .mul(2.0); - let alt_base = alt_base.mul(CONFIG.mountain_scale); - let alt = CONFIG + let mut alt = CONFIG.sea_level.add(alt_pre.mul(CONFIG.mountain_scale)); + let water_alt = CONFIG .sea_level - .mul(map_edge_factor) - .add(alt_pre.mul(CONFIG.mountain_scale)); + .add(water_alt_pre.mul(CONFIG.mountain_scale)); + let downhill = if downhill_pre == -2 { + None + } else if downhill_pre < 0 { + panic!("Uh... shouldn't this never, ever happen?"); + } else { + Some( + uniform_idx_as_vec2(downhill_pre as usize) + * TerrainChunkSize::RECT_SIZE.map(|e| e as i32), + ) + }; let cliff = gen_ctx.cliff_nz.get((wposf.div(2048.0)).into_array()) as f32 + chaos * 0.2; // Logistic regression. Make sure x ∈ (0, 1). - let logit = |x: f32| x.ln() - x.neg().ln_1p(); + let logit = |x: f64| x.ln() - x.neg().ln_1p(); // 0.5 + 0.5 * tanh(ln(1 / (1 - 0.1) - 1) / (2 * (sqrt(3)/pi))) - let logistic_2_base = 3.0f32.sqrt().mul(f32::consts::FRAC_2_PI); + let logistic_2_base = 3.0f64.sqrt().mul(f64::consts::FRAC_2_PI); // Assumes μ = 0, σ = 1 - let logistic_cdf = |x: f32| x.div(logistic_2_base).tanh().mul(0.5).add(0.5); + let logistic_cdf = |x: f64| x.div(logistic_2_base).tanh().mul(0.5).add(0.5); + + let is_underwater = match river.river_kind { + Some(RiverKind::Ocean) | Some(RiverKind::Lake { .. }) => true, + Some(RiverKind::River { .. }) => false, // TODO: inspect width + None => false, + }; + let river_xy = Vec2::new(river.velocity.x, river.velocity.y).magnitude(); + let river_slope = river.velocity.z / river_xy; + match river.river_kind { + Some(RiverKind::River { cross_section }) => { + if cross_section.x >= 0.5 && cross_section.y >= CONFIG.river_min_height { + /* println!( + "Big area! Pos area: {:?}, River data: {:?}, slope: {:?}", + wposf, river, river_slope + ); */ + } + if river_slope.abs() >= 1.0 && cross_section.x >= 1.0 { + log::info!( + "Big waterfall! Pos area: {:?}, River data: {:?}, slope: {:?}", + wposf, + river, + river_slope + ); + } + } + Some(RiverKind::Lake { .. }) => { + // Forces lakes to be downhill from the land around them, and adds some noise to + // the lake bed to make sure it's not too flat. + let lake_bottom_nz = (gen_ctx.small_nz.get((wposf.div(20.0)).into_array()) as f32) + .max(-1.0) + .min(1.0) + .mul(3.0); + alt = alt.min(water_alt - 5.0) + lake_bottom_nz; + } + _ => {} + } // No trees in the ocean or with zero humidity (currently) - let tree_density = if alt <= CONFIG.sea_level + 5.0 { + let tree_density = if is_underwater { 0.0 } else { - let tree_density = (gen_ctx.tree_nz.get((wposf.div(1024.0)).into_array()) as f32) + let tree_density = (gen_ctx.tree_nz.get((wposf.div(1024.0)).into_array())) .mul(1.5) .add(1.0) .mul(0.5) - .mul(1.2 - chaos * 0.95) + .mul(1.2 - chaos as f64 * 0.95) .add(0.05) .max(0.0) .min(1.0); @@ -643,25 +1122,31 @@ impl SimChunk { 1.0 } else { // Weighted logit sum. 
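The "Big waterfall" log above compares the river's vertical velocity against its horizontal speed. The same test as a free function (plain arrays instead of vek vectors; the example numbers are made up):

fn is_big_waterfall(velocity: [f32; 3], cross_section_x: f32) -> bool {
    let horiz = (velocity[0] * velocity[0] + velocity[1] * velocity[1]).sqrt();
    let slope = velocity[2] / horiz;
    slope.abs() >= 1.0 && cross_section_x >= 1.0
}

fn main() {
    assert!(is_big_waterfall([1.0, 0.0, -2.0], 3.0));  // drops 2 units per 1 travelled
    assert!(!is_big_waterfall([3.0, 4.0, -2.0], 3.0)); // slope = -0.4, just a fast river
}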
- logistic_cdf(logit(humidity) + 0.5 * logit(tree_density)) + logistic_cdf(logit(humidity as f64) + 0.5 * logit(tree_density)) } // rescale to (-0.95, 0.95) .sub(0.5) .mul(0.95) .add(0.5) - }; + } as f32; Self { chaos, - alt_base, + flux, alt, + water_alt, + downhill, temp, humidity, - rockiness: (gen_ctx.rock_nz.get((wposf.div(1024.0)).into_array()) as f32) - .sub(0.1) - .mul(1.3) - .max(0.0), - is_cliffs: cliff > 0.5 && alt > CONFIG.sea_level + 5.0, + rockiness: if true { + (gen_ctx.rock_nz.get((wposf.div(1024.0)).into_array()) as f32) + .sub(0.1) + .mul(1.3) + .max(0.0) + } else { + 0.0 + }, + is_cliffs: cliff > 0.5 && !is_underwater, near_cliffs: cliff > 0.2, tree_density, forest_kind: if temp > 0.0 { @@ -682,6 +1167,9 @@ impl SimChunk { } } else if temp > CONFIG.tropical_temp { if humidity > CONFIG.jungle_hum { + if tree_density > 0.0 { + // println!("Mangrove: {:?}", wposf); + } ForestKind::Mangrove } else if humidity > CONFIG.forest_hum { // NOTE: Probably the wrong kind of tree for this climate. @@ -724,7 +1212,7 @@ impl SimChunk { }, spawn_rate: 1.0, location: None, - + river, structures: Structures { town: None }, } } diff --git a/world/src/sim/util.rs b/world/src/sim/util.rs index 6080d98024..d64f8a8e90 100644 --- a/world/src/sim/util.rs +++ b/world/src/sim/util.rs @@ -1,7 +1,26 @@ use super::WORLD_SIZE; +use bitvec::prelude::{bitbox, bitvec, BitBox}; use common::{terrain::TerrainChunkSize, vol::RectVolSize}; +use num::Float; +use rayon::prelude::*; +use std::{f32, f64, u32}; use vek::*; +/// Calculates the smallest distance along an axis (x, y) from an edge of +/// the world. This value is maximal at WORLD_SIZE / 2 and minimized at the extremes +/// (0 or WORLD_SIZE on one or more axes). It then divides the quantity by cell_size, +/// so the final result is 1 when we are not in a cell along the edge of the world, and +/// ranges between 0 and 1 otherwise (lower when the chunk is closer to the edge). +pub fn map_edge_factor(posi: usize) -> f32 { + uniform_idx_as_vec2(posi) + .map2(WORLD_SIZE.map(|e| e as i32), |e, sz| { + (sz / 2 - (e - sz / 2).abs()) as f32 / 16.0 + }) + .reduce_partial_min() + .max(0.0) + .min(1.0) +} + /// Computes the cumulative distribution function of the weighted sum of k independent, /// uniformly distributed random variables between 0 and 1. For each variable i, we use weights[i] /// as the weight to give samples[i] (the weights should all be positive). @@ -91,7 +110,7 @@ pub fn cdf_irwin_hall(weights: &[f32; N], samples: [f32; N]) -> /// generated the index. /// /// NOTE: Length should always be WORLD_SIZE.x * WORLD_SIZE.y. -pub type InverseCdf = Box<[(f32, f32)]>; +pub type InverseCdf = Box<[(f32, F)]>; /// Computes the position Vec2 of a SimChunk from an index, where the index was generated by /// uniform_noise. @@ -135,9 +154,12 @@ pub fn vec2_as_uniform_idx(idx: Vec2) -> usize { /// Returns a vec of (f32, f32) pairs consisting of the percentage of chunks with a value lower than /// this one, and the actual noise value (we don't need to cache it, but it makes ensuring that /// subsequent code that needs the noise value actually uses the same one we were using here -/// easier). -pub fn uniform_noise(f: impl Fn(usize, Vec2) -> Option) -> InverseCdf { +/// easier). Also returns the "inverted index" pointing from a position to a noise. 
+pub fn uniform_noise( + f: impl Fn(usize, Vec2) -> Option + Sync, +) -> (InverseCdf, Box<[(usize, F)]>) { let mut noise = (0..WORLD_SIZE.x * WORLD_SIZE.y) + .into_par_iter() .filter_map(|i| { (f( i, @@ -151,16 +173,130 @@ pub fn uniform_noise(f: impl Fn(usize, Vec2) -> Option) -> InverseCdf // sort_unstable_by is equivalent to sort_by here since we include a unique index in the // comparison. We could leave out the index, but this might make the order not // reproduce the same way between different versions of Rust (for example). - noise.sort_unstable_by(|f, g| (f.1, f.0).partial_cmp(&(g.1, g.0)).unwrap()); + noise.par_sort_unstable_by(|f, g| (f.1, f.0).partial_cmp(&(g.1, g.0)).unwrap()); // Construct a vector that associates each chunk position with the 1-indexed // position of the noise in the sorted vector (divided by the vector length). // This guarantees a uniform distribution among the samples (excluding those that returned // None, which will remain at zero). - let mut uniform_noise = vec![(0.0, 0.0); WORLD_SIZE.x * WORLD_SIZE.y].into_boxed_slice(); + let mut uniform_noise = vec![(0.0, F::zero()); WORLD_SIZE.x * WORLD_SIZE.y].into_boxed_slice(); + // NOTE: Consider using try_into here and elsewhere in this function, since i32::MAX + // technically doesn't fit in an f32 (even if we should never reach that limit). let total = noise.len() as f32; - for (noise_idx, (chunk_idx, noise_val)) in noise.into_iter().enumerate() { + for (noise_idx, &(chunk_idx, noise_val)) in noise.iter().enumerate() { uniform_noise[chunk_idx] = ((1 + noise_idx) as f32 / total, noise_val); } - uniform_noise + (uniform_noise, noise.into_boxed_slice()) +} + +/// Iterate through all cells adjacent and including four chunks whose top-left point is posi. +/// This isn't just the immediate neighbors of a chunk plus the center, because it is designed +/// to cover neighbors of a point in the chunk's "interior." +/// +/// This is what's used during cubic interpolation, for example, as it guarantees that for any +/// point between the given chunk (on the top left) and its top-right/down-right/down neighbors, +/// the twelve chunks surrounding this box (its "perimeter") are also inspected. +pub fn local_cells(posi: usize) -> impl Clone + Iterator { + let pos = uniform_idx_as_vec2(posi); + // NOTE: want to keep this such that the chunk index is in ascending order! + let grid_size = 3i32; + let grid_bounds = 2 * grid_size + 1; + (0..grid_bounds * grid_bounds) + .into_iter() + .map(move |index| { + Vec2::new( + pos.x + (index % grid_bounds) - grid_size, + pos.y + (index / grid_bounds) - grid_size, + ) + }) + .filter(|pos| { + pos.x >= 0 && pos.y >= 0 && pos.x < WORLD_SIZE.x as i32 && pos.y < WORLD_SIZE.y as i32 + }) + .map(vec2_as_uniform_idx) +} + +/// Iterate through all cells adjacent to a chunk. +pub fn neighbors(posi: usize) -> impl Clone + Iterator { + let pos = uniform_idx_as_vec2(posi); + // NOTE: want to keep this such that the chunk index is in ascending order! + [ + (-1, -1), + (0, -1), + (1, -1), + (-1, 0), + (1, 0), + (-1, 1), + (0, 1), + (1, 1), + ] + .into_iter() + .map(move |&(x, y)| Vec2::new(pos.x + x, pos.y + y)) + .filter(|pos| { + pos.x >= 0 && pos.y >= 0 && pos.x < WORLD_SIZE.x as i32 && pos.y < WORLD_SIZE.y as i32 + }) + .map(vec2_as_uniform_idx) +} + +// Note that we should already have okay cache locality since we have a grid. 
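uniform_noise above turns an arbitrarily skewed noise field into uniformly distributed quantiles by sorting and assigning (rank + 1) / total. The core trick in isolation (a hypothetical uniform_rank helper, ignoring the None filtering and the parallel sort):

fn uniform_rank(values: &[f32]) -> Vec<f32> {
    let mut order: Vec<usize> = (0..values.len()).collect();
    // Include the index in the key so ties still sort reproducibly.
    order.sort_unstable_by(|&i, &j| (values[i], i).partial_cmp(&(values[j], j)).unwrap());
    let total = values.len() as f32;
    let mut out = vec![0.0; values.len()];
    for (rank, &idx) in order.iter().enumerate() {
        out[idx] = (rank + 1) as f32 / total;
    }
    out
}

fn main() {
    // Heavily skewed input still yields evenly spaced quantiles.
    let skewed = [0.01f32, 0.02, 0.03, 0.9, 1000.0];
    assert_eq!(uniform_rank(&skewed), vec![0.2f32, 0.4, 0.6, 0.8, 1.0]);
}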
+pub fn uphill<'a>(dh: &'a [isize], posi: usize) -> impl Clone + Iterator<Item = usize> + 'a {
+    neighbors(posi).filter(move |&posj| dh[posj] == posi as isize)
+}
+
+/// Compute the neighbor "most downhill" from all chunks.
+///
+/// TODO: See if allocating in advance is worthwhile.
+pub fn downhill(h: &[f32], is_ocean: impl Fn(usize) -> bool + Sync) -> Box<[isize]> {
+    // Constructs not only the list of downhill nodes, but also computes an ordering (visiting
+    // nodes in order from roots to leaves).
+    h.par_iter()
+        .enumerate()
+        .map(|(posi, &nh)| {
+            let _pos = uniform_idx_as_vec2(posi);
+            if is_ocean(posi) {
+                -2
+            } else {
+                let mut best = -1;
+                let mut besth = nh;
+                for nposi in neighbors(posi) {
+                    let nbh = h[nposi];
+                    if nbh < besth {
+                        besth = nbh;
+                        best = nposi as isize;
+                    }
+                }
+                best
+            }
+        })
+        .collect::<Vec<isize>>()
+        .into_boxed_slice()
+}
+
+/// Find all ocean tiles from a height map, using an inductive definition of ocean as one of:
+/// - posi is at the side of the world (map_edge_factor(posi) == 0.0)
+/// - posi has a neighboring ocean tile, and has a height below sea level (oldh(posi) <= 0.0).
+pub fn get_oceans(oldh: impl Fn(usize) -> f32 + Sync) -> BitBox {
+    // We can mark tiles as ocean candidates by scanning row by row, since the top edge is ocean,
+    // the sides are connected to it, and any subsequent ocean tiles must be connected to it.
+    let mut is_ocean = bitbox![0; WORLD_SIZE.x * WORLD_SIZE.y];
+    let mut stack = Vec::new();
+    for x in 0..WORLD_SIZE.x as i32 {
+        stack.push(vec2_as_uniform_idx(Vec2::new(x, 0)));
+        stack.push(vec2_as_uniform_idx(Vec2::new(x, WORLD_SIZE.y as i32 - 1)));
+    }
+    for y in 1..WORLD_SIZE.y as i32 - 1 {
+        stack.push(vec2_as_uniform_idx(Vec2::new(0, y)));
+        stack.push(vec2_as_uniform_idx(Vec2::new(WORLD_SIZE.x as i32 - 1, y)));
+    }
+    while let Some(chunk_idx) = stack.pop() {
+        // println!("Ocean chunk {:?}: {:?}", uniform_idx_as_vec2(chunk_idx), oldh(chunk_idx));
+        if *is_ocean.at(chunk_idx) {
+            continue;
+        }
+        *is_ocean.at(chunk_idx) = true;
+        stack.extend(neighbors(chunk_idx).filter(|&neighbor_idx| {
+            // println!("Ocean neighbor: {:?}: {:?}", uniform_idx_as_vec2(neighbor_idx), oldh(neighbor_idx));
+            oldh(neighbor_idx) <= 0.0
+        }));
+    }
+    is_ocean
+}
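The same flood fill as get_oceans above, shrunk to a plain Vec<bool> on a tiny grid (no bitvec, no WORLD_SIZE; the ocean_fill name and the demo map are made up). The map has a below-sea-level pit that is not connected to the border and therefore, correctly, does not become ocean:

fn ocean_fill(h: &[f32], w: i32, hgt: i32) -> Vec<bool> {
    let idx = |x: i32, y: i32| (y * w + x) as usize;
    let mut is_ocean = vec![false; (w * hgt) as usize];
    // Seed the border unconditionally.
    let mut stack: Vec<(i32, i32)> = Vec::new();
    for x in 0..w {
        stack.push((x, 0));
        stack.push((x, hgt - 1));
    }
    for y in 1..hgt - 1 {
        stack.push((0, y));
        stack.push((w - 1, y));
    }
    // Grow only through neighbours at or below sea level.
    while let Some((x, y)) = stack.pop() {
        if is_ocean[idx(x, y)] {
            continue;
        }
        is_ocean[idx(x, y)] = true;
        for dy in -1..=1 {
            for dx in -1..=1 {
                let (nx, ny) = (x + dx, y + dy);
                if (dx, dy) != (0, 0)
                    && nx >= 0
                    && ny >= 0
                    && nx < w
                    && ny < hgt
                    && h[idx(nx, ny)] <= 0.0
                {
                    stack.push((nx, ny));
                }
            }
        }
    }
    is_ocean
}

fn main() {
    // 5x5 map: border at sea level, a raised ring, and a deep pit in the middle.
    let h: [f32; 25] = [
        0.0, 0.0, 0.0, 0.0, 0.0,
        0.0, 5.0, 5.0, 5.0, 0.0,
        0.0, 5.0, -9.0, 5.0, 0.0,
        0.0, 5.0, 5.0, 5.0, 0.0,
        0.0, 0.0, 0.0, 0.0, 0.0,
    ];
    let ocean = ocean_fill(&h, 5, 5);
    assert!(ocean[0]);                                     // border is ocean
    assert!(!ocean[2 * 5 + 2]);                            // the landlocked pit is not
    assert_eq!(ocean.iter().filter(|&&o| o).count(), 16);  // exactly the border ring
}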