veloren/world/src/sim/mod.rs

mod diffusion;
mod erosion;
mod location;
mod map;
mod settlement;
mod util;
// Reexports
pub use self::diffusion::diffusion;
use self::erosion::Compute;
pub use self::erosion::{
do_erosion, fill_sinks, get_drainage, get_lakes, get_multi_drainage, get_multi_rec, get_rivers,
mrec_downhill, Alt, RiverData, RiverKind,
};
pub use self::location::Location;
pub use self::map::{MapConfig, MapDebug};
pub use self::settlement::Settlement;
pub use self::util::{
cdf_irwin_hall, downhill, get_oceans, local_cells, map_edge_factor, neighbors,
uniform_idx_as_vec2, uniform_noise, uphill, vec2_as_uniform_idx, HybridMulti as HybridMulti_,
InverseCdf, ScaleBias, NEIGHBOR_DELTA,
};
use crate::{
all::ForestKind,
block::BlockGen,
column::ColumnGen,
generator::TownState,
util::{seed_expan, FastNoise, RandomField, Sampler, StructureGen2d},
CONFIG,
};
use common::{
terrain::{BiomeKind, TerrainChunkSize},
vol::RectVolSize,
};
use hashbrown::HashMap;
use noise::{
BasicMulti, Billow, Fbm, HybridMulti, MultiFractal, NoiseFn, RangeFunction, RidgedMulti,
Seedable, SuperSimplex, Worley,
};
use num::{Float, Signed};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaChaRng;
use rayon::prelude::*;
use serde_derive::{Deserialize, Serialize};
use std::{
f32, f64,
fs::File,
io::{BufReader, BufWriter},
ops::{Add, Div, Mul, Neg, Sub},
path::PathBuf,
sync::Arc,
};
use vek::*;
// NOTE: I suspect i32 is too small for this (1024 * 16 * 1024 * 16 * 8 doesn't fit in an i32), but we'll see
// what happens, I guess! We could always store sizes >> 3. I think 32 or 64 is the absolute
// limit though, and would require substantial changes. Also, 1024 * 16 * 1024 * 16 is no longer
// cleanly representable in f32 (that stops around 1024 * 4 * 1024 * 4, for signed floats anyway)
// but I think that is probably less important since I don't think we actually cast a chunk id to
// float, just coordinates... could be wrong though!
pub const WORLD_SIZE: Vec2<usize> = Vec2 { x: 1024, y: 1024 };
/// A structure that holds cached noise values and cumulative distribution functions for the input
/// that led to those values. See the definition of InverseCdf for a description of how to
/// interpret the types of its fields.
struct GenCdf {
humid_base: InverseCdf,
temp_base: InverseCdf,
chaos: InverseCdf,
alt: Box<[Alt]>,
basement: Box<[Alt]>,
water_alt: Box<[f32]>,
dh: Box<[isize]>,
/// NOTE: Until we hit 4096 × 4096, this should suffice since integers with an absolute value
/// under 2^24 can be exactly represented in an f32.
flux: Box<[Compute]>,
pure_flux: InverseCdf<Compute>,
alt_no_water: InverseCdf,
rivers: Box<[RiverData]>,
}
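/// The collection of noise functions and point/structure generators that drive world
/// generation. Every field is seeded independently from the world RNG in
/// `WorldSim::generate`.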
pub(crate) struct GenCtx {
pub turb_x_nz: SuperSimplex,
pub turb_y_nz: SuperSimplex,
pub chaos_nz: RidgedMulti,
pub alt_nz: HybridMulti_,
pub hill_nz: SuperSimplex,
pub temp_nz: Fbm,
// Humidity noise
pub humid_nz: Billow,
// Small amounts of noise for simulating rough terrain.
pub small_nz: BasicMulti,
pub rock_nz: HybridMulti,
pub cliff_nz: HybridMulti,
pub warp_nz: FastNoise,
pub tree_nz: BasicMulti,
pub cave_0_nz: SuperSimplex,
pub cave_1_nz: SuperSimplex,
pub structure_gen: StructureGen2d,
pub region_gen: StructureGen2d,
pub cliff_gen: StructureGen2d,
pub fast_turb_x_nz: FastNoise,
pub fast_turb_y_nz: FastNoise,
pub town_gen: StructureGen2d,
pub river_seed: RandomField,
pub rock_strength_nz: Fbm,
pub uplift_nz: Worley,
}
pub enum FileOpts {
/// If set, generate the world map and do not try to save to or load from file
/// (default).
Generate,
/// If set, generate the world map and save the world file (path is created
/// the same way screenshot paths are).
Save,
/// If set, load the world file from this path (errors if path not found).
Load(PathBuf),
}
impl Default for FileOpts {
fn default() -> Self {
Self::Generate
}
}
pub struct WorldOpts {
/// Set to false to disable seeding elements during worldgen.
pub seed_elements: bool,
pub world_file: FileOpts,
}
impl Default for WorldOpts {
fn default() -> Self {
Self {
seed_elements: true,
world_file: Default::default(),
}
}
}
/// A way to store certain components between runs of map generation. Only intended for
/// development purposes--no attempt is made to detect map invalidation or make sure that the map
/// is synchronized with updates to noise-rs, changes to other parameters, etc.
#[derive(Serialize, Deserialize)]
pub struct WorldFile {
/// Saved altitude height map.
pub alt: Box<[Alt]>,
/// Saved basement height map.
pub basement: Box<[Alt]>,
}
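/// The world simulation state: the generated per-chunk data for the whole map, together
/// with the noise context and RNG used to produce (and later extend) it.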
pub struct WorldSim {
pub seed: u32,
pub(crate) chunks: Vec<SimChunk>,
pub(crate) locations: Vec<Location>,
pub(crate) gen_ctx: GenCtx,
pub rng: ChaChaRng,
}
impl WorldSim {
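/// Generate the full world map for `seed`, running erosion and the other passes below.
///
/// A minimal usage sketch; the `world::sim` import path is an assumption, and generating
/// the full 1024 × 1024 chunk map is expensive:
///
/// ```ignore
/// use world::sim::{WorldOpts, WorldSim};
///
/// let sim = WorldSim::generate(1337, WorldOpts::default());
/// assert_eq!(sim.seed, 1337);
/// ```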
pub fn generate(seed: u32, opts: WorldOpts) -> Self {
let mut rng = ChaChaRng::from_seed(seed_expan::rng_state(seed));
let continent_scale = 5_000.0f64 /*32768.0*/
.div(32.0)
.mul(TerrainChunkSize::RECT_SIZE.x as f64);
let rock_lacunarity = /*0.5*/2.0/*HybridMulti::DEFAULT_LACUNARITY*/;
let uplift_scale = /*512.0*//*256.0*/128.0;
let uplift_turb_scale = uplift_scale / 4.0/*32.0*//*64.0*/;
let gen_ctx = GenCtx {
turb_x_nz: SuperSimplex::new().set_seed(rng.gen()),
turb_y_nz: SuperSimplex::new().set_seed(rng.gen()),
chaos_nz: RidgedMulti::new()
.set_octaves(/*7*//*3*/ /*7*//*3*/7)
.set_frequency(
RidgedMulti::DEFAULT_FREQUENCY * (5_000.0 / continent_scale)
// /*RidgedMulti::DEFAULT_FREQUENCY **/ 3_000.0 * 8.0 / continent_scale,
)
// .set_persistence(RidgedMulti::DEFAULT_LACUNARITY.powf(-(1.0 - 0.5)))
.set_seed(rng.gen()),
hill_nz: SuperSimplex::new().set_seed(rng.gen()),
alt_nz: HybridMulti_::new()
.set_octaves(/*3*//*2*/ /*8*//*3*/8)
// 1/2048*32*1024 = 16
.set_frequency(
/*HybridMulti::DEFAULT_FREQUENCY*/
// (2^8*(10000/5000/10000))*32 = per-chunk
(10_000.0/* * 2.0*/ / continent_scale) as f64,
)
// .set_frequency(1.0 / ((1 << 0) as f64))
// .set_lacunarity(1.0)
// persistence = lacunarity^(-(1.0 - fractal increment))
.set_lacunarity(HybridMulti_::DEFAULT_LACUNARITY)
.set_persistence(HybridMulti_::DEFAULT_LACUNARITY.powf(-(1.0 - /*0.75*/0.0)))
// .set_persistence(/*0.5*//*0.5*/0.5 + 1.0 / ((1 << 6) as f64))
// .set_offset(/*0.7*//*0.5*//*0.75*/0.7)
.set_offset(/*0.7*//*0.5*//*0.75*/0.0)
.set_seed(rng.gen()),
//temp_nz: SuperSimplex::new().set_seed(rng.gen()),
temp_nz: Fbm::new()
.set_octaves(6)
.set_persistence(0.5)
// 1/2^14*1024*32 = 2
// 1/(2^14-2^12)*1024*32 = 8/3 ~= 3
.set_frequency(
/*4.0 / /*(1024.0 * 4.0/* * 8.0*/)*//*32.0*/((1 << 6) * (WORLD_SIZE.x)) as f64*/
1.0 / (((1 << 6) * 64) as f64),
)
// .set_frequency(1.0 / 1024.0)
// .set_frequency(1.0 / (1024.0 * 8.0))
.set_lacunarity(2.0)
.set_seed(rng.gen()),
small_nz: BasicMulti::new().set_octaves(2).set_seed(rng.gen()),
rock_nz: HybridMulti::new().set_persistence(0.3).set_seed(rng.gen()),
cliff_nz: HybridMulti::new().set_persistence(0.3).set_seed(rng.gen()),
warp_nz: FastNoise::new(rng.gen()), //BasicMulti::new().set_octaves(3).set_seed(gen_seed()),
tree_nz: BasicMulti::new()
.set_octaves(12)
.set_persistence(0.75)
.set_seed(rng.gen()),
cave_0_nz: SuperSimplex::new().set_seed(rng.gen()),
cave_1_nz: SuperSimplex::new().set_seed(rng.gen()),
structure_gen: StructureGen2d::new(rng.gen(), 32, 16),
region_gen: StructureGen2d::new(rng.gen(), 400, 96),
cliff_gen: StructureGen2d::new(rng.gen(), 80, 56),
humid_nz: Billow::new()
.set_octaves(9)
.set_persistence(0.4)
.set_frequency(0.2)
// .set_octaves(6)
// .set_persistence(0.5)
.set_seed(rng.gen()),
fast_turb_x_nz: FastNoise::new(rng.gen()),
fast_turb_y_nz: FastNoise::new(rng.gen()),
town_gen: StructureGen2d::new(rng.gen(), 2048, 1024),
river_seed: RandomField::new(rng.gen()),
rock_strength_nz: Fbm/*HybridMulti_*//*BasicMulti*//*Fbm*/::new()
.set_octaves(/*6*//*5*//*4*//*5*//*4*//*6*/10)
.set_lacunarity(rock_lacunarity)
// persistence = lacunarity^(-(1.0 - fractal increment))
// NOTE: In paper, fractal increment is roughly 0.25.
// .set_offset(0.0)
// .set_offset(0.7)
.set_persistence(/*0.9*/ /*2.0*//*1.5*//*HybridMulti::DEFAULT_LACUNARITY*/rock_lacunarity.powf(-(1.0 - 0.25/*0.75*//*0.9*/)))
// 256*32/2^4
// (0.5^(-(1.0-0.9)))^4/256/32*2^4*16*32
// (0.5^(-(1.0-0.9)))^4/256/32*2^4*256*4
// (0.5^(-(1.0-0.9)))^1/256/32*2^4*256*4
// (2^(-(1.0-0.9)))^4
// 16.0
.set_frequency(/*0.9*/ /*Fbm*//*HybridMulti_::DEFAULT_FREQUENCY*/1.0 / (2.0/*8.0*//*256.0*//*1.0*//*16.0*/ * TerrainChunkSize::RECT_SIZE.x as f64/*4.0*//* TerrainChunkSize::RECT_SIZE.x as f64 */ * 2.0.powi(10 - 1)))
// .set_frequency(/*0.9*/ /*Fbm*//*HybridMulti_::DEFAULT_FREQUENCY*/1.0 / (8.0/*8.0*//*256.0*//*1.0*//*16.0*/ * 32.0/*4.0*//* TerrainChunkSize::RECT_SIZE.x as f64 */ * 2.0.powi(10 - 1)))
// .set_persistence(/*0.9*/ /*2.0*/0.67)
// .set_frequency(/*0.9*/ Fbm::DEFAULT_FREQUENCY / (2.0 * 32.0))
// .set_lacunarity(0.5)
.set_seed(rng.gen()),
uplift_nz: Worley::new()
.set_seed(rng.gen())
.set_frequency(1.0 / (TerrainChunkSize::RECT_SIZE.x as f64 * uplift_scale))
// .set_displacement(/*0.5*/0.0)
.set_displacement(/*0.5*/1.0)
.set_range_function(RangeFunction::Euclidean)
// .enable_range(true),
// g_nz: RidgedMulti::new()
};
let river_seed = &gen_ctx.river_seed;
let rock_strength_nz = &gen_ctx.rock_strength_nz;
// NOTE: octaves should definitely fit into i32, but we should check anyway to make
// sure.
/* assert!(rock_strength_nz.persistence > 0.0);
let rock_strength_scale = (1..rock_strength_nz.octaves as i32)
.map(|octave| rock_strength_nz.persistence.powi(octave + 1))
.sum::<f64>()
// For some reason, this is "scaled" by 3.0.
.mul(3.0);
let rock_strength_nz = ScaleBias::new(&rock_strength_nz)
.set_scale(1.0 / rock_strength_scale); */
let height_scale = 1.0f64; // 1.0 / CONFIG.mountain_scale as f64;
let max_erosion_per_delta_t = /*8.0*//*32.0*//*1.0*//*32.0*//*32.0*//*64.0*/16.0/*128.0*//*1.0*//*0.2 * /*100.0*/250.0*//*128.0*//*16.0*//*128.0*//*32.0*/ * height_scale;
let erosion_pow_low = /*0.25*//*1.5*//*2.0*//*0.5*//*4.0*//*0.25*//*1.0*//*2.0*//*1.5*//*1.5*//*0.35*//*0.43*//*0.5*//*0.45*//*0.37*/1.002;
let erosion_pow_high = /*1.5*//*1.0*//*0.55*//*0.51*//*2.0*/1.002;
let erosion_center = /*0.45*//*0.75*//*0.75*//*0.5*//*0.75*/0.5;
let n_steps = /*200*//*10_000*//*1000*//*50*//*100*/100; //100; // /*100*//*50*//*100*//*100*//*50*//*25*/25/*100*//*37*/;//150;//37/*100*/;//50;//50;//37;//50;//37; // /*37*//*29*//*40*//*150*/37; //150;//200;
let n_small_steps = 0; //25;//8;//50;//50;//8;//8;//8;//8;//8; // 8
let n_post_load_steps = 0; //25;//8
// Logit (log-odds) function. Make sure x ∈ (0, 1).
let logit = |x: f64| x.ln() - (-x).ln_1p();
// 0.5 + 0.5 * tanh(ln(1 / (1 - 0.1) - 1) / (2 * (sqrt(3)/pi)))
let logistic_2_base = 3.0f64.sqrt() * f64::consts::FRAC_2_PI;
let logistic_base = /*3.0f64.sqrt() * f64::consts::FRAC_1_PI*/1.0f64;
// Assumes μ = 0, σ = 1
let logistic_cdf = |x: f64| (x / logistic_2_base).tanh() * 0.5 + 0.5;
let exp_inverse_cdf = |x: f64/*, pow: f64*/| -(-x).ln_1p()/* / ln(pow)*/;
// 2 / pi * ln(tan(pi/2 * p))
let hypsec_inverse_cdf =
|x: f64| f64::consts::FRAC_2_PI * ((x * f64::consts::FRAC_PI_2).tan().ln());
let min_epsilon =
1.0 / (WORLD_SIZE.x as f64 * WORLD_SIZE.y as f64).max(f64::EPSILON as f64 * 0.5);
let max_epsilon = (1.0 - 1.0 / (WORLD_SIZE.x as f64 * WORLD_SIZE.y as f64))
.min(1.0 - f64::EPSILON as f64 * 0.5);
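// min_epsilon/max_epsilon keep uniform CDF samples strictly inside (0, 1), so that
// transforms like logit or exp_inverse_cdf (usable as inv_func below) stay finite.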
// fractal dimension should be between 0 and 0.9999...
// (lacunarity^octaves)^(-H) = persistence^(octaves)
// lacunarity^(octaves*-H) = persistence^(octaves)
// e^(-octaves*H*ln(lacunarity)) = e^(octaves * ln(persistence))
// -octaves * H * ln(lacunarity) = octaves * ln(persistence)
// -H = ln(persistence) / ln(lacunarity)
// H = -ln(persistence) / ln(lacunarity)
// ln(persistence) = -H * ln(lacunarity)
// persistence = lacunarity^(-H)
//
// -ln(2^(-0.25))/ln(2) = 0.25
//
// -ln(2^(-0.1))/ln(2)
//
// 0 = -ln(persistence) / ln(lacunarity)
// 0 = ln(persistence) => persistence = e^0 = 1
//
// 1 = -ln(persistence) / ln(lacunarity)
// -ln(lacunarity) = ln(persistence)
// e^(-ln(lacunarity)) = e^(ln(persistence))
// 1 / lacunarity = persistence
//
// Ergo, we should not set fractal dimension to anything not between 1 / lacunarity and 1.
//
// dimension = -ln(0.25)/ln(2*pi/3) = 1.875
//
// (2*pi/3^1)^(-(-ln(0.25)/ln(2*pi/3))) = 0.25
//
// Default should be at most 1 / lacunarity.
//
// (2 * pi / 3)^(-ln(0.25)/ln(2*pi/3))
//
// -ln(0.25)/ln(2*pi/3) = 1.88
//
// (2 * pi / 3)^(-ln(0.25)/ln(2*pi/3))
//
// 2 * pi / 3
//
// 2.0^(2(-ln(1.5)/ln(2)))
// (1 / 1.5)^(2)
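//
// Worked example: with lacunarity = 2.0 and fractal increment = 0.25 (the
// rock_strength_nz settings below), persistence = 2.0^(-(1.0 - 0.25)) = 2.0^(-0.75)
// ≈ 0.595, which indeed lies between 1 / lacunarity = 0.5 and 1.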
// No NaNs in these uniform vectors, since the original noise value always returns Some.
let ((alt_base, _), (chaos, _)) = rayon::join(
|| {
uniform_noise(|_, wposf| {
// "Base" of the chunk, to be multiplied by CONFIG.mountain_scale (multiplied value
// is from -0.35 * (CONFIG.mountain_scale * 1.05) to
// 0.35 * (CONFIG.mountain_scale * 0.95), but value here is from -0.3675 to 0.3325).
Some(
(gen_ctx
.alt_nz
.get((wposf.div(10_000.0)).into_array())
.min(1.0)
.max(-1.0)/* .mul(0.25)
.add(0.125) */)
// .add(0.5)
.sub(0.05)
// .add(0.05)
// .add(0.075)
.mul(0.35), /*-0.0175*/
)
})
},
|| {
uniform_noise(|_, wposf| {
// From 0 to 1.6, but the distribution before the max is between -1 and 1.6, so there is
// a 50% chance that hill will end up at 0.3 or lower, and probably a very high
// chance it will be exactly 0.
let hill = (0.0f64
//.add(0.0)
+ gen_ctx
.hill_nz
.get((wposf.mul(32.0).div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64)).div(1_500.0)).into_array())
.min(1.0)
.max(-1.0)
.mul(1.0)
+ gen_ctx
.hill_nz
.get((wposf.mul(32.0).div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64)).div(400.0)).into_array())
.min(1.0)
.max(-1.0)
.mul(0.3))
.add(0.3)
.max(0.0);
// chaos produces a value in [0.12, 1.32]. It is a meta-level factor intended to
// reflect how "chaotic" the region is--how much weird stuff is going on on this
// terrain.
Some(
((gen_ctx
.chaos_nz
.get((wposf.div(3_000.0)).into_array())
.min(1.0)
.max(-1.0))
.add(1.0)
.mul(0.5)
// [0, 1] * [0.4, 1] = [0, 1] (but probably towards the lower end)
//.mul(1.0)
.mul(
(gen_ctx
.chaos_nz
.get((wposf.div(6_000.0)).into_array())
.min(1.0)
.max(-1.0))
.abs()
.max(0.4)
.min(1.0),
)
// Chaos is always increased by a little when we're on a hill (but remember
// that hill is 0.3 or less about 50% of the time).
// [0, 1] + 0.2 * [0, 1.6] = [0, 1.32]
.add(0.2 * hill)
// We can't have *no* chaos!
.max(0.12)) as f32,
)
})
},
);
// We ignore sea level because we actually want to be relative to sea level here and want
// things in CONFIG.mountain_scale units, but otherwise this is a correct altitude
// calculation. Note that this is using the "unadjusted" temperature.
//
// No NaNs in these uniform vectors, since the original noise value always returns Some.
let (alt_old, /*alt_old_inverse*/ _) = uniform_noise(|posi, wposf| {
// This is the extension upwards from the base added to some extra noise from -1 to
// 1.
//
// The extra noise is multiplied by alt_main (the mountain part of the extension)
// powered to 0.8 and clamped to [0.15, 1], to get a value between [-1, 1] again.
//
// The sides then receive the sequence (y * 0.3 + 1.0) * 0.4, so we have
// [-1*1*(1*0.3+1)*0.4, 1*(1*0.3+1)*0.4] = [-0.52, 0.52].
//
// Adding this to alt_main thus yields a value between -0.4 (if alt_main = 0 and
// gen_ctx = -1, 0+-1*(0*.3+1)*0.4) and 1.52 (if alt_main = 1 and gen_ctx = 1).
// Most of the points are above 0.
//
// Next, we add again by a sin of alt_main (between [-1, 1])^pow, getting
// us (after adjusting for sign) another value between [-1, 1], and then this is
// multiplied by 0.045 to get [-0.045, 0.045], which is added to [-0.4, 0.52] to get
// [-0.445, 0.565].
let alt_main = {
// Extension upwards from the base. A positive number from 0 to 1 curved to be
// maximal at 0. Also to be multiplied by CONFIG.mountain_scale.
let alt_main = (gen_ctx
.alt_nz
.get((wposf.div(2_000.0)).into_array())
.min(1.0)
.max(-1.0))
.abs()
// 0.5
.powf(1.35);
fn spring(x: f64, pow: f64) -> f64 {
x.abs().powf(pow) * x.signum()
}
(0.0 + alt_main/*0.4*/
+ (gen_ctx
.small_nz
.get((wposf.mul(32.0).div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64)).div(300.0)).into_array())
.min(1.0)
.max(-1.0))
.mul(alt_main.powf(0.8).max(/*0.25*/ 0.15))
.mul(0.3)
.add(1.0)
.mul(0.4)
// 0.52
+ spring(alt_main.abs().powf(0.5).min(0.75).mul(60.0).sin(), 4.0).mul(0.045))
};
// Now we can compute the final altitude using chaos.
// We multiply by chaos clamped to [0.1, 1.32] to get a value between [0.03, 2.232]
// for alt_pre, then multiply by CONFIG.mountain_scale and add to the base and sea
// level to get an adjusted value, then multiply the whole thing by map_edge_factor
// (TODO: compute final bounds).
//
// [-.3675, .3325] + [-0.445, 0.565] * [0.12, 1.32]^1.2
// ~ [-.3675, .3325] + [-0.445, 0.565] * [0.07, 1.40]
// = [-.3675, .3325] + ([-0.5785, 0.7345])
// = [-0.946, 1.067]
Some(
((alt_base[posi].1
+ alt_main /*1.0*/
.mul(
(chaos[posi].1 as f64) /*.mul(2.0).sub(1.0).max(0.0)*/
.powf(1.2), /*0.25)*//*0.285*/
)/*0.1425*/)
.mul(map_edge_factor(posi) as f64)
.add(
(CONFIG.sea_level as f64)
.div(CONFIG.mountain_scale as f64)
.mul(map_edge_factor(posi) as f64),
)
.sub((CONFIG.sea_level as f64).div(CONFIG.mountain_scale as f64)))
as f32,
)
/* Some(
// FIXME: May fail on big-endian platforms.
((alt_base[posi].1 as f64 + 0.5 + (/*alt_main./*to_le_bytes()[7]*/to_bits() & 1) as f64 * ((1.0 / CONFIG.mountain_scale as f64).powf(1.0 / erosion_pow_low)) + */alt_main / CONFIG.mountain_scale as f64 * 128.0).mul(0.1).powf(1.2))
.mul(map_edge_factor(posi) as f64)
.add(
(CONFIG.sea_level as f64)
.div(CONFIG.mountain_scale as f64)
.mul(map_edge_factor(posi) as f64),
)
.sub((CONFIG.sea_level as f64).div(CONFIG.mountain_scale as f64)))
as f32,
) */
});
// Calculate oceans.
let old_height =
|posi: usize| alt_old[posi].1 * CONFIG.mountain_scale * height_scale as f32;
/* let is_ocean = (0..WORLD_SIZE.x * WORLD_SIZE.y)
.into_par_iter()
.map(|i| map_edge_factor(i) == 0.0)
.collect::<Vec<_>>(); */
let is_ocean = get_oceans(old_height);
let is_ocean_fn = |posi: usize| is_ocean[posi];
let turb_wposf_div = 8.0/*64.0*/;
let uplift_nz_dist = gen_ctx.uplift_nz.clone().enable_range(true);
// Recalculate altitudes without oceans.
// NaNs in these uniform vectors wherever is_ocean_fn returns true.
let (alt_old_no_ocean, alt_old_inverse) = uniform_noise(|posi, _| {
if is_ocean_fn(posi) {
None
} else {
Some(old_height(posi) /*.abs()*/)
}
});
let (uplift_uniform, _) = uniform_noise(|posi, wposf| {
if is_ocean_fn(posi) {
None
} else {
let turb_wposf = wposf
.div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64))
.div(turb_wposf_div);
let turb = Vec2::new(
gen_ctx.turb_x_nz.get(turb_wposf.into_array()),
gen_ctx.turb_y_nz.get(turb_wposf.into_array()),
) * uplift_turb_scale
* TerrainChunkSize::RECT_SIZE.map(|e| e as f64);
// let turb = Vec2::zero();
let turb_wposf = wposf + turb;
let turb_wposi = turb_wposf
.map2(TerrainChunkSize::RECT_SIZE, |e, f| e / f as f64)
.map2(WORLD_SIZE, |e, f| (e as i32).max(f as i32 - 1).min(0));
let turb_posi = vec2_as_uniform_idx(turb_wposi);
let udist = uplift_nz_dist
.get(turb_wposf.into_array())
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
let uheight = gen_ctx
.uplift_nz
.get(turb_wposf.into_array())
/* .min(0.5)
.max(-0.5)*/
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
let uchaos = /* gen_ctx.chaos_nz.get((wposf.div(3_000.0)).into_array())
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5); */
chaos[posi].1;
let uchaos_1 = (uchaos as f64) / 1.32;
let oheight = /*alt_old*//*alt_base*/alt_old_no_ocean[/*(turb_posi / 64) * 64*/posi].0 as f64 - 0.5;
assert!(udist >= 0.0);
assert!(udist <= 1.0);
let uheight_1 = uheight; //.powf(2.0);
let udist_1 = (0.5 - udist).mul(2.0).max(0.0);
let udist_2 = udist.mul(2.0).min(1.0);
let udist_3 = (1.0 - udist).max(0.0);
let udist_4 = udist.min(1.0);
let variation = 1.0.min(
64.0 * 64.0
/ (WORLD_SIZE.x as f64
* WORLD_SIZE.y as f64
* (TerrainChunkSize::RECT_SIZE.x as f64
* TerrainChunkSize::RECT_SIZE.y as f64
/ 128.0
/ 128.0)),
);
let variation_1 = (uheight * /*udist_2*/udist_4).min(variation);
let height = (oheight + 0.5).powf(2.0);
// 1.0 - variation + variation * uchaos_1;
// uheight * /*udist_2*/udist_4 - variation_1 + variation_1 * uchaos_1;
// uheight * (0.5 + 0.5 * ((uchaos as f64) / 1.32)) - 0.125;
// 0.2;
// 1.0;
// uheight_1;
// uheight_1 * (0.8 + 0.2 * oheight.signum() * oheight.abs().powf(0.25));
// uheight_1 * (/*udist_2*/udist.powf(2.0) * (f64::consts::PI * 2.0 * (1.0 / (1.0 - udist).max(f64::EPSILON)).min(2.5)/*udist * 5.0*/ * 2.0).cos().mul(0.5).add(0.5));
// uheight * udist_ * (udist_ * 4.0 * 2 * f64::consts::PI).sin()
// uheight;
// (0.8 * uheight + oheight.powf(2.0) * 0.2).max(0.0).min(1.0);
// ((0.8 - 0.2) * uheight + 0.2 + oheight.signum() * oheight.abs().powf(/*0.5*/2.0) * udist_2.powf(2.0)).max(0.0).min(1.0);
// ((0.8 - 0.2) * uheight + 0.2 + oheight.signum() * oheight.abs().powf(/*0.5*/2.0) * 0.2).max(0.0).min(1.0);
// (0.8 * uheight * udist_1 + 0.8 * udist_2 + oheight.powf(2.0) * 0.2).max(0.0).min(1.0);
/* uheight * 0.8 * udist_1.powf(2.0) +
/*exp_inverse_cdf*/(oheight/*.max(0.0).min(max_epsilon).abs()*/).powf(2.0) * 0.2 * udist_2.powf(2.0); */
// (uheight + oheight.powf(2.0) * 0.05).max(0.0).min(1.0);
// (uheight + oheight.powf(2.0) * 0.2).max(0.0).min(1.0);
// * (1.0 - udist);// uheight * (1.0 - udist)/*oheight*//* * udist*/ + oheight * udist;/*uheight * (1.0 - udist);*/
// let height = uheight * (0.5 - udist) * 0.8 + (oheight.signum() * oheight.max(0.0).abs().powf(2.0)) * 0.2;// * (1.0 - udist);// uheight * (1.0 - udist)/*oheight*//* * udist*/ + oheight * udist;/*uheight * (1.0 - udist);*/
Some(height)
}
});
let old_height_uniform = |posi: usize| alt_old_no_ocean[posi].0;
let alt_old_min_uniform = 0.0;
let alt_old_max_uniform = 1.0;
let alt_old_center_uniform = erosion_center;
let (_alt_old_min_index, alt_old_min) = alt_old_inverse.first().unwrap();
let (_alt_old_max_index, alt_old_max) = alt_old_inverse.last().unwrap();
let (_alt_old_mid_index, alt_old_mid) =
alt_old_inverse[(alt_old_inverse.len() as f64 * erosion_center) as usize];
let alt_old_center =
((alt_old_mid - alt_old_min) as f64 / (alt_old_max - alt_old_min) as f64);
/* // Find the minimum and maximum original altitudes.
// NOTE: Will panic if there is no land, and will not work properly if the minimum and
// maximum land altitude are identical (will most likely panic later).
let old_height_uniform = |posi: usize| alt_old[posi].0;
let (alt_old_min_index, _alt_old_min) = alt_old_inverse
.iter()
.copied()
.find(|&(_, h)| h > 0.0)
.unwrap();
let &(alt_old_max_index, _alt_old_max) = alt_old_inverse.last().unwrap();
let alt_old_min_uniform = alt_old[alt_old_min_index].0;
let alt_old_max_uniform = alt_old[alt_old_max_index].0; */
// Perform some erosion.
// 2^((2^10-2)/256) = 15.91...
// -ln(1-(1-(2^(-22)*0.5)))
// -ln(1-(1-(2^(-53)*0.5)))
// ((-ln(1-((1-2^(-53)*0.5))))/ln(e))/((-ln(1-((2^(-53)*0.5))))/ln(e))
// ((-ln(1-((0.5))))/ln(2))/((-ln(1-((1 - 2^(-53)*0.5))))/ln(2))
// ((-ln(1-((0.5))))/ln(e))/((-ln(1-((1 - 2^(-53)*0.5))))/ln(e))
// ((-ln(1-((0.5))))/ln(e))/((-ln(1-((2^(-53)*0.5))))/ln(e))
// ((-ln(1-((1-2^(-53)))))/ln(1.002))/((-ln(1-((1 - 2^(-53)*0.5))))/ln(1+2^(-10*2)*0.5))
// ((-ln(1-((0.9999999999999999))))/ln(e))/((-ln(1-((1 - 2^(-53)*0.5))))/ln(1+2^(-53)*0.5))
//
// ((-ln(1-((1-2^(-10*2)))))/ln(1.002))/((-ln(1-((1 - 2^(-10*2)))))/ln(1+2^(-9)))
// ((-ln(1-((2^(-10*2)))))/ln(1.002))/((-ln(1-((1 - 2^(-10*2)))))/ln(1+2^(-9)))
// ((-ln(1-((1-2^(-10*2)))))/ln(1.002))/((-ln(1-((1 - 2^(-10*2)))))/ln(1.002))
// ((ln(0.6)-ln(1-0.6)) - (ln(1/(2048*2048))-ln((1-1/(2048*2048)))))/((ln(1-1/(2048*2048))-ln(1-(1-1/(2048*2048)))) - (ln(1/(2048*2048))-ln((1-1/(2048*2048)))))
let inv_func = |x: f64| x/*exp_inverse_cdf*//*logit*//*hypsec_inverse_cdf*/;
let alt_exp_min_uniform = /*exp_inverse_cdf*//*logit*/inv_func(min_epsilon);
let alt_exp_max_uniform = /*exp_inverse_cdf*//*logit*/inv_func(max_epsilon);
// let erosion_pow = 2.0;
// let n_steps = 100;//150;
// let erosion_factor = |x: f64| logistic_cdf(erosion_pow * logit(x));
let log_odds = |x: f64| {
logit(x)
- logit(
/*erosion_center*/ alt_old_center_uniform, /*alt_old_center*/
)
};
/* let erosion_factor = |x: f64| logistic_cdf(logistic_base * if x <= /*erosion_center*/alt_old_center_uniform/*alt_old_center*/ { erosion_pow_low.ln() } else { erosion_pow_high.ln() } * log_odds(x))/*0.5 + (x - 0.5).signum() * ((x - 0.5).mul(2.0).abs(
).powf(erosion_pow).mul(0.5))*/; */
let erosion_factor = |x: f64| (/*if x <= /*erosion_center*/alt_old_center_uniform/*alt_old_center*/ { erosion_pow_low.ln() } else { erosion_pow_high.ln() } * */(/*exp_inverse_cdf*//*logit*/inv_func(x) - alt_exp_min_uniform) / (alt_exp_max_uniform - alt_exp_min_uniform))/*0.5 + (x - 0.5).signum() * ((x - 0.5).mul(2.0).abs(
).powf(erosion_pow).mul(0.5))*//*.powf(0.5)*//*.powf(1.5)*//*.powf(2.0)*/;
let rock_strength_div_factor = /*8.0*/(2.0 * TerrainChunkSize::RECT_SIZE.x as f64) / 8.0;
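// Per-chunk exponent n for the stream-power erosion law used by do_erosion; returning
// 1.0 everywhere (land and ocean) keeps incision linear in slope.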
let n_func = |posi| {
if is_ocean_fn(posi) {
return 1.0;
}
let wposf = (uniform_idx_as_vec2(posi) * TerrainChunkSize::RECT_SIZE.map(|e| e as i32))
.map(|e| e as f64);
let turb_wposf = wposf
.div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64))
.div(turb_wposf_div);
let turb = Vec2::new(
gen_ctx.turb_x_nz.get(turb_wposf.into_array()),
gen_ctx.turb_y_nz.get(turb_wposf.into_array()),
) * uplift_turb_scale
* TerrainChunkSize::RECT_SIZE.map(|e| e as f64);
// let turb = Vec2::zero();
let turb_wposf = wposf + turb;
let turb_wposi = turb_wposf
.map2(TerrainChunkSize::RECT_SIZE, |e, f| e / f as f64)
.map2(WORLD_SIZE, |e, f| (e as i32).max(f as i32 - 1).min(0));
let turb_posi = vec2_as_uniform_idx(turb_wposi);
let uheight = gen_ctx
.uplift_nz
.get(turb_wposf.into_array())
/* .min(0.5)
.max(-0.5)*/
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
/* if uheight > 0.8 {
1.5
} else {
1.0
} */
// ((1.5 - 0.6) * uheight + 0.6) as f32
// ((1.5 - 1.0) * uheight + 1.0) as f32
// ((3.5 - 1.5) * (1.0 - uheight) + 1.5) as f32
1.0
};
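// Concavity index θ (the m/n ratio of the stream-power law); 0.4 is a commonly used
// empirical value.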
let theta_func = |posi| 0.4;
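// Per-chunk fluvial erodibility kf; reference values for different terrain types are
// listed in the closure body below, and ocean chunks get the high "plateau" value.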
let kf_func = {
|posi| {
if is_ocean_fn(posi) {
return 1.0e-4;
// return 2.0e-5;
// return 2.0e-6;
// return 2.0e-10;
// return 0.0;
}
let wposf = (uniform_idx_as_vec2(posi)
* TerrainChunkSize::RECT_SIZE.map(|e| e as i32))
.map(|e| e as f64);
let turb_wposf = wposf
.div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64))
.div(turb_wposf_div);
let turb = Vec2::new(
gen_ctx.turb_x_nz.get(turb_wposf.into_array()),
gen_ctx.turb_y_nz.get(turb_wposf.into_array()),
) * uplift_turb_scale
* TerrainChunkSize::RECT_SIZE.map(|e| e as f64);
// let turb = Vec2::zero();
let turb_wposf = wposf + turb;
let turb_wposi = turb_wposf
.map2(TerrainChunkSize::RECT_SIZE, |e, f| e / f as f64)
.map2(WORLD_SIZE, |e, f| (e as i32).max(f as i32 - 1).min(0));
let turb_posi = vec2_as_uniform_idx(turb_wposi);
let uheight = gen_ctx
.uplift_nz
.get(turb_wposf.into_array())
/* .min(0.5)
.max(-0.5)*/
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
let uchaos = /* gen_ctx.chaos_nz.get((wposf.div(3_000.0)).into_array())
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5); */
chaos[posi].1;
let oheight = /*alt_old*//*alt_base*/alt_old_no_ocean[/*(turb_posi / 64) * 64*/posi].0 as f64;
let oheight_2 = /*alt_old*//*alt_base*/(alt_old_no_ocean[/*(turb_posi / 64) * 64*/posi].1 as f64 / CONFIG.mountain_scale as f64);
// kf = 1.5e-4: high-high (plateau [fan sediment])
// kf = 1e-4: high (plateau)
// kf = 2e-5: normal (dike [unexposed])
// kf = 1e-6: normal-low (dike [exposed])
// kf = 2e-6: low (mountain)
// --
// kf = 2.5e-7 to 8e-7: very low (Cordonnier papers on plate tectonics)
// ((1.0 - uheight) * (1.5e-4 - 2.0e-6) + 2.0e-6) as f32
//
// ACTUAL recorded values worldwide: much lower...
//
// Or maybe not? Getting something like 2e-3...
//
// ...or 8.345e5.
// ((1.0 - uheight) * (5e-5 - 9.88e-15) + 9.88e-15)
// ((1.0 - uheight) * (1.5e-4 - 9.88e-15) + 9.88e-15)
// ((1.0 - uheight) * (8.345e5 - 2.0e-6) + 2.0e-6) as f32
// ((1.0 - uheight) * (1.5e-4 - 2.0e-6) + 2.0e-6)
// ((1.0 - uheight) * (0.5 + 0.5 * ((1.32 - uchaos as f64) / 1.32)) * (1.5e-4 - 2.0e-6) + 2.0e-6)
// ((1.0 - uheight) * (0.5 + 0.5 * /*((1.32 - uchaos as f64) / 1.32)*/oheight) * (1.5e-4 - 2.0e-6) + 2.0e-6)
// ((1.0 - uheight) * (0.5 - 0.5 * /*((1.32 - uchaos as f64) / 1.32)*/oheight_2) * (1.5e-4 - 2.0e-6) + 2.0e-6)
// ((1.0 - uheight) * (0.5 - 0.5 * /*((1.32 - uchaos as f64) / 1.32)*/oheight) * (1.5e-4 - 2.0e-6) + 2.0e-6)
// 2e-5
2.5e-6 * 16.0.powf(0.4) /* / 4.0 * 0.25 *//* * 4.0*/
// 2.9e-10
// ((1.0 - uheight) * (5e-5 - 2.9e-10) + 2.9e-10)
// ((1.0 - uheight) * (5e-5 - 2.9e-14) + 2.9e-14)
}
};
let kd_func = {
|posi| {
if is_ocean_fn(posi) {
return 1.0e-2;
}
let wposf = (uniform_idx_as_vec2(posi)
* TerrainChunkSize::RECT_SIZE.map(|e| e as i32))
.map(|e| e as f64);
let turb_wposf = wposf
.div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64))
.div(turb_wposf_div);
let turb = Vec2::new(
gen_ctx.turb_x_nz.get(turb_wposf.into_array()),
gen_ctx.turb_y_nz.get(turb_wposf.into_array()),
) * uplift_turb_scale
* TerrainChunkSize::RECT_SIZE.map(|e| e as f64);
// let turb = Vec2::zero();
let turb_wposf = wposf + turb;
let turb_wposi = turb_wposf
.map2(TerrainChunkSize::RECT_SIZE, |e, f| e / f as f64)
.map2(WORLD_SIZE, |e, f| (e as i32).max(f as i32 - 1).min(0));
let turb_posi = vec2_as_uniform_idx(turb_wposi);
let uheight = gen_ctx
.uplift_nz
.get(turb_wposf.into_array())
/* .min(0.5)
.max(-0.5)*/
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
// kd = 1e-1: high (mountain, dike)
// kd = 1.5e-2: normal-high (plateau [fan sediment])
// kd = 1e-2: normal (plateau)
1.0e-2 * (1.0 / 16.0) // m_old^2 / y * (1 m_new / 4 m_old)^2
// (uheight * (1.0e-1 - 1.0e-2) + 1.0e-2)
}
};
let g_func = |posi| {
if
/*is_ocean_fn(posi)*/
map_edge_factor(posi) == 0.0 {
return 0.0;
// return 5.0;
}
let wposf = (uniform_idx_as_vec2(posi) * TerrainChunkSize::RECT_SIZE.map(|e| e as i32))
.map(|e| e as f64);
let turb_wposf = wposf
.div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64))
.div(turb_wposf_div);
let turb = Vec2::new(
gen_ctx.turb_x_nz.get(turb_wposf.into_array()),
gen_ctx.turb_y_nz.get(turb_wposf.into_array()),
) * uplift_turb_scale
* TerrainChunkSize::RECT_SIZE.map(|e| e as f64);
// let turb = Vec2::zero();
let turb_wposf = wposf + turb;
let turb_wposi = turb_wposf
.map2(TerrainChunkSize::RECT_SIZE, |e, f| e / f as f64)
.map2(WORLD_SIZE, |e, f| (e as i32).max(f as i32 - 1).min(0));
let turb_posi = vec2_as_uniform_idx(turb_wposi);
let uheight = gen_ctx
.uplift_nz
.get(turb_wposf.into_array())
/* .min(0.5)
.max(-0.5)*/
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
let uchaos = /* gen_ctx.chaos_nz.get((wposf.div(3_000.0)).into_array())
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5); */
chaos[posi].1;
assert!(uchaos <= 1.32);
// G = d* v_s / p_0, where
// v_s is the settling velocity of sediment grains
// p_0 is the mean precipitation rate
// d* is the sediment concentration ratio (between concentration near riverbed
// interface, and average concentration over the water column).
// d* varies with Rouse number which defines relative contribution of bed, suspended,
// and washed loads.
//
// G is typically on the order of 1 or greater. However, we are only guaranteed to
// converge for G ≤ 1, so we keep it in the chaos range of [0.12, 1.32].
// (((1.32 - uchaos) / 1.32).powf(0.75) * 1.32).min(/*1.1*/1.0)
// ((1.32 - 0.12) * (1.0 - uheight) + 0.12) as f32
// 1.1 * (1.0 - uheight) as f32
// 1.0 * (1.0 - uheight) as f32
// 1.0
// 5.0
// 10.0
// 2.0
// 0.0
1.0
// 1.5
};
let epsilon_0_func = |posi| {
if is_ocean_fn(posi) {
// marine: ε₀ = 2.078e-3
return 2.078e-3;
// return 5.0;
}
let wposf = (uniform_idx_as_vec2(posi) * TerrainChunkSize::RECT_SIZE.map(|e| e as i32))
.map(|e| e as f64);
let turb_wposf = wposf
.div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64))
.div(turb_wposf_div);
let turb = Vec2::new(
gen_ctx.turb_x_nz.get(turb_wposf.into_array()),
gen_ctx.turb_y_nz.get(turb_wposf.into_array()),
) * uplift_turb_scale
* TerrainChunkSize::RECT_SIZE.map(|e| e as f64);
// let turb = Vec2::zero();
let turb_wposf = wposf + turb;
let turb_wposi = turb_wposf
.map2(TerrainChunkSize::RECT_SIZE, |e, f| e / f as f64)
.map2(WORLD_SIZE, |e, f| (e as i32).max(f as i32 - 1).min(0));
let turb_posi = vec2_as_uniform_idx(turb_wposi);
let uheight = gen_ctx
.uplift_nz
.get(turb_wposf.into_array())
/* .min(0.5)
.max(-0.5)*/
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
let wposf3 = Vec3::new(
wposf.x,
wposf.y,
uheight * CONFIG.mountain_scale as f64 * rock_strength_div_factor,
);
let rock_strength = gen_ctx
.rock_strength_nz
.get(wposf3.into_array())
/* .min(0.5)
.max(-0.5)*/
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
let center = /*0.25*/0.4;
let dmin = center - /*0.15;//0.05*/0.05;
let dmax = center + /*0.05*//*0.10*/0.05; //0.05;
let log_odds = |x: f64| logit(x) - logit(center);
let ustrength = logistic_cdf(
1.0 * logit(rock_strength.min(1.0f64 - 1e-7).max(1e-7))
+ 1.0 * log_odds(uheight.min(dmax).max(dmin)),
);
// marine: ε₀ = 2.078e-3
// San Gabriel Mountains: ε₀ = 3.18e-4
// Oregon Coast Range: ε₀ = 2.68e-4
// Frogs Hollow (peak production = 0.25): ε₀ = 1.41e-4
// Point Reyes: ε₀ = 8.1e-5
// Nunnock River (fractured granite, least weathered?): ε₀ = 5.3e-5
// The stronger the rock, the lower the production rate of exposed bedrock.
// ((1.0 - ustrength) * (/*3.18e-4*/2.078e-3 - 5.3e-5) + 5.3e-5) as f32
0.0
};
let alpha_func = |posi| {
if is_ocean_fn(posi) {
// marine: α = 3.7e-2
return 3.7e-2;
}
let wposf = (uniform_idx_as_vec2(posi) * TerrainChunkSize::RECT_SIZE.map(|e| e as i32))
.map(|e| e as f64);
let turb_wposf = wposf
.div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64))
.div(turb_wposf_div);
let turb = Vec2::new(
gen_ctx.turb_x_nz.get(turb_wposf.into_array()),
gen_ctx.turb_y_nz.get(turb_wposf.into_array()),
) * uplift_turb_scale
* TerrainChunkSize::RECT_SIZE.map(|e| e as f64);
// let turb = Vec2::zero();
let turb_wposf = wposf + turb;
let turb_wposi = turb_wposf
.map2(TerrainChunkSize::RECT_SIZE, |e, f| e / f as f64)
.map2(WORLD_SIZE, |e, f| (e as i32).max(f as i32 - 1).min(0));
let turb_posi = vec2_as_uniform_idx(turb_wposi);
let uheight = gen_ctx
.uplift_nz
.get(turb_wposf.into_array())
/* .min(0.5)
.max(-0.5)*/
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
let wposf3 = Vec3::new(
wposf.x,
wposf.y,
uheight * CONFIG.mountain_scale as f64 * rock_strength_div_factor,
);
let rock_strength = gen_ctx
.rock_strength_nz
.get(wposf3.into_array())
/* .min(0.5)
.max(-0.5)*/
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
let center = /*0.25*/0.4;
let dmin = center - /*0.15;//0.05*/0.05;
let dmax = center + /*0.05*//*0.10*/0.05; //0.05;
let log_odds = |x: f64| logit(x) - logit(center);
let ustrength = logistic_cdf(
1.0 * logit(rock_strength.min(1.0f64 - 1e-7).max(1e-7))
+ 1.0 * log_odds(uheight.min(dmax).max(dmin)),
);
// Frog Hollow (peak production = 0.25): α = 4.2e-2
// San Gabriel Mountains: α = 3.8e-2
// marine: α = 3.7e-2
// Oregon Coast Range: α = 3e-2
// Nunnock river (fractured granite, least weathered?): α = 2e-3
// Point Reyes: α = 1.6e-2
// The stronger the rock, the faster the decline in soil production.
(ustrength * (4.2e-2 - 1.6e-2) + 1.6e-2) as f32
};
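// Per-chunk tectonic uplift rate fed to do_erosion: zero over ocean, otherwise derived
// from uplift_uniform (based on the pre-erosion height distribution), pushed through
// erosion_factor and scaled by max_erosion_per_delta_t.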
let uplift_fn = |posi| {
if is_ocean_fn(posi) {
/* return 1e-2
.mul(max_erosion_per_delta_t) as f32; */
return 0.0;
}
let wposf = (uniform_idx_as_vec2(posi) * TerrainChunkSize::RECT_SIZE.map(|e| e as i32))
.map(|e| e as f64);
let alt_main = {
// Extension upwards from the base. A positive number from 0 to 1 curved to be
// maximal at 0. Also to be multiplied by CONFIG.mountain_scale.
let alt_main = (gen_ctx
.alt_nz
.get((wposf.div(2_000.0)).into_array())
.min(1.0)
.max(-1.0))
.abs()
.powf(1.35);
fn spring(x: f64, pow: f64) -> f64 {
x.abs().powf(pow) * x.signum()
}
(0.0 + alt_main
+ (gen_ctx
.small_nz
.get((wposf.div(300.0)).into_array())
.min(1.0)
.max(-1.0))
.mul(alt_main.powf(0.8).max(/*0.25*/ 0.15))
.mul(0.3)
.add(1.0)
.mul(0.4)/* + spring(alt_main.abs().powf(0.5).min(0.75).mul(60.0).sin(), 4.0)
.mul(0.045)*/)
};
let height =
((/*old_height_uniform*/uplift_uniform[posi]./*0*/1 - alt_old_min_uniform) as f64
/ (alt_old_max_uniform - alt_old_min_uniform) as f64)
/*((old_height(posi) - alt_old_min) as f64
/ (alt_old_max - alt_old_min) as f64)*/
;
let height = height.mul(max_epsilon - min_epsilon).add(min_epsilon);
/*.max(1e-7 / CONFIG.mountain_scale as f64)
.min(1.0f64 - 1e-7);*/
/* let alt_main = {
// Extension upwards from the base. A positive number from 0 to 1 curved to be
// maximal at 0. Also to be multiplied by CONFIG.mountain_scale.
let alt_main = (gen_ctx
.alt_nz
.get((wposf.div(2_000.0)).into_array())
.min(1.0)
.max(-1.0))
.abs()
.powf(1.35);
fn spring(x: f64, pow: f64) -> f64 {
x.abs().powf(pow) * x.signum()
}
(0.0 + alt_main
+ (gen_ctx
.small_nz
.get((wposf.div(300.0)).into_array())
.min(1.0)
.max(-1.0))
.mul(alt_main.powf(0.8).max(/*0.25*/ 0.15))
.mul(0.3)
.add(1.0)
.mul(0.4)
+ spring(alt_main.abs().powf(0.5).min(0.75).mul(60.0).sin(), 4.0).mul(0.045))
}; */
// let height = height + (alt_main./*to_le_bytes()[7]*/to_bits() & 1) as f64 * ((1.0 / CONFIG.mountain_scale as f64).powf(1.0 / erosion_pow_low));
let height = erosion_factor(height);
assert!(height >= 0.0);
assert!(height <= 1.0);
// assert!(alt_main >= 0.0);
let (bump_factor, bump_max) = if
/*height < f32::EPSILON as f64 * 0.5*//*false*/
/*true*/
false {
(
/*(alt_main./*to_le_bytes()[7]*/to_bits() & 1) as f64*/
(alt_main / CONFIG.mountain_scale as f64 * 128.0).mul(0.1).powf(1.2) * /*(1.0 / CONFIG.mountain_scale as f64)*/(f32::EPSILON * 0.5) as f64,
(f32::EPSILON * 0.5) as f64,
)
} else {
(0.0, 0.0)
};
// tan(6/360*2*pi)*32 ~ 3.4
// 3.4/32*512 ~ 54
// 18/32*512 ~ 288
// tan(pi/6)*32 ~ 18
// tan(54/360*2*pi)*32
// let height = 1.0f64;
let turb_wposf = wposf
.div(TerrainChunkSize::RECT_SIZE.map(|e| e as f64))
.div(turb_wposf_div);
let turb = Vec2::new(
gen_ctx.turb_x_nz.get(turb_wposf.into_array()),
gen_ctx.turb_y_nz.get(turb_wposf.into_array()),
) * uplift_turb_scale
* TerrainChunkSize::RECT_SIZE.map(|e| e as f64);
let turb_wposf = wposf + turb;
let uheight = gen_ctx
.uplift_nz
.get(turb_wposf.into_array())
/* .min(0.5)
.max(-0.5)*/
.min(1.0)
.max(-1.0)
.mul(0.5)
.add(0.5);
// u = 1e-3: normal-high (dike, mountain)
// u = 5e-4: normal (mid example in Yuan, average mountain uplift)
// u = 2e-4: low (low example in Yuan; known that lagoons etc. may have u ~ 0.05).
// u = 0: low (plateau [fan, altitude = 0.0])
// let height = uheight;
// let height = 1.0f64;
// let height = 1.0 / 7.0f64;
// let height = 0.0 / 31.0f64;
let bfrac = /*erosion_factor(0.5);*/0.0;
let height = (height - bfrac).abs().div(1.0 - bfrac);
let height = height
/* .mul(31.0 / 32.0)
.add(1.0 / 32.0) */
/* .mul(15.0 / 16.0)
.add(1.0 / 16.0) */
/* .mul(5.0 / 8.0)
.add(3.0 / 8.0) */
/* .mul(6.0 / 8.0)
.add(2.0 / 8.0) */
/* .mul(7.0 / 8.0)
.add(1.0 / 8.0) */
.mul(max_erosion_per_delta_t)
.sub(/*1.0 / CONFIG.mountain_scale as f64*/ bump_max)
.add(bump_factor);
/* .sub(/*1.0 / CONFIG.mountain_scale as f64*/(f32::EPSILON * 0.5) as f64)
.add(bump_factor); */
height as f64
};
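// Initial altitude handed to the erosion solver: ocean chunks keep their pre-erosion
// height, land chunks start from the old height divided by mountain_scale and shifted
// down by 0.5.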
let alt_func = |posi| {
if is_ocean_fn(posi) {
// -max_erosion_per_delta_t as f32
// -1.0 / CONFIG.mountain_scale
// -0.75
// -CONFIG.sea_level / CONFIG.mountain_scale
// 0.0
// 0.0
old_height(posi) // 0.0
} else {
// uplift_fn(posi)
let wposf = (uniform_idx_as_vec2(posi)
* TerrainChunkSize::RECT_SIZE.map(|e| e as i32))
.map(|e| e as f64);
let alt_main = {
// Extension upwards from the base. A positive number from 0 to 1 curved to be
// maximal at 0. Also to be multiplied by CONFIG.mountain_scale.
let alt_main = (gen_ctx
.alt_nz
.get((wposf.div(2_000.0)).into_array())
.min(1.0)
.max(-1.0))
.abs()
.powf(1.35);
fn spring(x: f64, pow: f64) -> f64 {
x.abs().powf(pow) * x.signum()
}
(0.0 + alt_main
+ (gen_ctx
.small_nz
.get((wposf.div(300.0)).into_array())
.min(1.0)
.max(-1.0))
.mul(alt_main.powf(0.8).max(/*0.25*/ 0.15))
.mul(0.3)
.add(1.0)
.mul(0.4)/* + spring(alt_main.abs().powf(0.5).min(0.75).mul(60.0).sin(), 4.0)
.mul(0.045)*/)
};
// (kf_func(posi) / 1.5e-4 * CONFIG.mountain_scale as f64) as f32
// (old_height_uniform(posi) as f64 * CONFIG.mountain_scale as f64) as f32
// (old_height_uniform(posi) as f64 * CONFIG.mountain_scale as f64) as f32
// (uplift_fn(posi) * CONFIG.mountain_scale as f64) as f32
// (old_height_uniform(posi) - 0.5)/* * max_erosion_per_delta_t as f32*/
(old_height(posi) as f64 / CONFIG.mountain_scale as f64) as f32 - 0.5
// ((((old_height(posi) - alt_old_min) as f64 / (alt_old_max - alt_old_min) as f64) - 0.25) * (CONFIG.mountain_scale as f64)) as f32
// old_height(posi)/* * max_erosion_per_delta_t as f32*/
// uplift_fn(posi) * (CONFIG.mountain_scale / max_erosion_per_delta_t as f32)
// 0.0
// /*-CONFIG.mountain_scale * 0.5 + *//*-CONFIG.mountain_scale/* * 0.75*/ + */(old_height_uniform(posi)/*.powf(2.0)*/ - 0.5)/* * CONFIG.mountain_scale as f32*/
// uplift_fn(posi) * (CONFIG.mountain_scale / max_erosion_per_delta_t as f32)
// 0.0
/* // 0.0
// -/*CONFIG.sea_level / CONFIG.mountain_scale*//* 0.75 */1.0
// ((old_height(posi) - alt_old_min) as f64 / (alt_old_max - alt_old_min) as f64) as f32
// uplift_fn(posi) / max_erosion_per_delta_t as f32
// old_height_uniform(posi) *
(/*((old_height(posi) - alt_old_min) as f64 / (alt_old_max - alt_old_min) as f64) **/(((6.0 / 360.0 * 2.0 * f64::consts::PI).tan()
* TerrainChunkSize::RECT_SIZE.reduce_partial_min() as f64)
.floor()
* height_scale)) as f32
// 5.0 / CONFIG.mountain_scale */
}
};
/* // FIXME: Remove.
let is_ocean = (0..WORLD_SIZE.x * WORLD_SIZE.y)
.into_par_iter()
.map(|i| map_edge_factor(i) == 0.0)
.collect::<Vec<_>>();
let is_ocean_fn = |posi: usize| is_ocean[posi]; */
// Load map, if necessary.
let parsed_world_file = (|| {
if let FileOpts::Load(ref path) = opts.world_file {
let file = match File::open(path) {
Ok(file) => file,
Err(err) => {
log::warn!("Couldn't read path for maps: {:?}", err);
return None;
}
};
let reader = BufReader::new(file);
let map: WorldFile = match bincode::deserialize_from(reader) {
Ok(map) => map,
Err(err) => {
log::warn!("Couldn't parse map: {:?}", err);
return None;
}
};
if map.alt.len() != map.basement.len()
|| map.alt.len() != WORLD_SIZE.x as usize * WORLD_SIZE.y as usize
{
log::warn!("World size of map is invalid.");
return None;
}
/* let f = |h| h;// / 4.0;
let mut map = map;
map.alt.par_iter_mut()
.zip(map.basement.par_iter_mut())
.for_each(|(mut h, mut b)| {
*h = f(*h);
*b = f(*b);
}); */
Some(map)
} else {
None
}
})();
let (alt, basement /*, alluvium*/) = if let Some(map) = parsed_world_file {
// let map_len = map.alt.len();
(
map.alt,
map.basement, /* vec![0.0; map_len].into_boxed_slice() */
)
} else {
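// No usable saved map: run the main erosion simulation for n_steps macro-steps with
// the per-chunk parameter closures defined above, followed by the quick small-scale
// cycle below.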
let (alt, basement /*, alluvium*/) = do_erosion(
0.0,
max_erosion_per_delta_t as f32,
n_steps,
&river_seed,
&rock_strength_nz,
|posi| alt_func(posi), // + if is_ocean_fn(posi) { 0.0 } else { 128.0 },
|posi| {
alt_func(posi)
- if is_ocean_fn(posi) {
0.0
} else {
/*1400.0*//*CONFIG.mountain_scale * 0.75*/
0.0
}
}, // if is_ocean_fn(posi) { old_height(posi) } else { 0.0 },
// |posi| 0.0,
is_ocean_fn,
uplift_fn,
|posi| n_func(posi),
|posi| theta_func(posi),
|posi| kf_func(posi),
|posi| kd_func(posi),
|posi| g_func(posi),
|posi| epsilon_0_func(posi),
|posi| alpha_func(posi),
);
// Quick "small scale" erosion cycle in order to lower extreme angles.
do_erosion(
0.0,
(1.0 * height_scale) as f32,
n_small_steps,
&river_seed,
&rock_strength_nz,
|posi| /* if is_ocean_fn(posi) { old_height(posi) } else { alt[posi] } *//*alt[posi] as f32*/(alt[posi]/* + alluvium[posi]*/) as f32,
|posi| basement[posi] as f32,
// |posi| /*alluvium[posi] as f32*/0.0f32,
is_ocean_fn,
|posi| uplift_fn(posi) * (1.0 * height_scale / max_erosion_per_delta_t),
|posi| n_func(posi),
|posi| theta_func(posi),
|posi| kf_func(posi),
|posi| kd_func(posi),
|posi| g_func(posi),
|posi| epsilon_0_func(posi),
|posi| alpha_func(posi),
)
};
// Save map, if necessary.
let map = WorldFile { alt, basement };
(|| {
if let FileOpts::Save = opts.world_file {
use std::time::SystemTime;
// Check if folder exists and create it if it does not
let mut path = PathBuf::from("./maps");
if !path.exists() {
if let Err(err) = std::fs::create_dir(&path) {
log::warn!("Couldn't create folder for map: {:?}", err);
return;
}
}
path.push(format!(
// TODO: Work out a nice bincode file extension.
"map_{}.bin",
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.map(|d| d.as_millis())
.unwrap_or(0)
));
let file = match File::create(path) {
Ok(file) => file,
Err(err) => {
log::warn!("Couldn't create file for maps: {:?}", err);
return;
}
};
let writer = BufWriter::new(file);
if let Err(err) = bincode::serialize_into(writer, &map) {
log::warn!("Couldn't write map: {:?}", err);
}
}
})();
let (alt, basement) = (map.alt, map.basement);
// Additional small-scale erosion after map load, only used during testing.
let (alt, basement /*, alluvium*/) = if n_post_load_steps == 0 {
(alt, basement /*, alluvium*/)
} else {
do_erosion(
0.0,
(1.0 * height_scale) as f32,
n_post_load_steps,
&river_seed,
&rock_strength_nz,
|posi| /* if is_ocean_fn(posi) { old_height(posi) } else { alt[posi] } */alt[posi] as f32,
|posi| basement[posi] as f32,
// |posi| alluvium[posi] as f32,
is_ocean_fn,
|posi| uplift_fn(posi) * (1.0 * height_scale / max_erosion_per_delta_t),
|posi| n_func(posi),
|posi| theta_func(posi),
|posi| kf_func(posi),
|posi| kd_func(posi),
|posi| g_func(posi),
|posi| epsilon_0_func(posi),
|posi| alpha_func(posi),
)
};
let is_ocean = get_oceans(|posi| alt[posi]);
let is_ocean_fn = |posi: usize| is_ocean[posi];
let mut dh = downhill(
|posi| alt[posi], /*&alt*/
/*old_height*/ is_ocean_fn,
);
let (boundary_len, indirection, water_alt_pos, maxh) =
get_lakes(/*&/*water_alt*/alt*/ |posi| alt[posi], &mut dh);
log::debug!("Max height: {:?}", maxh);
let (mrec, mstack, mwrec) = {
let mut wh = vec![0.0; WORLD_SIZE.x * WORLD_SIZE.y];
get_multi_rec(
|posi| alt[posi],
&dh,
&water_alt_pos,
&mut wh,
WORLD_SIZE.x,
WORLD_SIZE.y,
TerrainChunkSize::RECT_SIZE.x as Compute,
TerrainChunkSize::RECT_SIZE.y as Compute,
maxh,
)
};
let flux_old = get_multi_drainage(&mstack, &mrec, &*mwrec, boundary_len);
let flux_rivers = get_drainage(&water_alt_pos, &dh, boundary_len);
// let flux_rivers = flux_old.clone();
let water_height_initial = |chunk_idx| {
let indirection_idx = indirection[chunk_idx];
// Find the lake this point is flowing into.
let lake_idx = if indirection_idx < 0 {
chunk_idx
} else {
indirection_idx as usize
};
/* // Find the pass this lake is flowing into (i.e. water at the lake bottom gets
// pushed towards the point identified by pass_idx).
let neighbor_pass_idx = dh[lake_idx]; */
let chunk_water_alt = if
/*neighbor_pass_idx*/
dh[lake_idx] < 0 {
// This is either a boundary node (dh[chunk_idx] == -2, i.e. water is at sea level)
// or part of a lake that flows directly into the ocean. In the former case, water
// is at sea level so we just return 0.0. In the latter case, the lake bottom must
// have been a boundary node in the first place--meaning this node flows directly
// into the ocean. In that case, its lake bottom is ocean, meaning its water is
// also at sea level. Thus, we return 0.0 in both cases.
0.0
} else {
// This chunk is draining into a body of water that isn't the ocean (i.e., a lake).
// Then we just need to find the pass height of the surrounding lake in order to
// figure out the initial water height (which fill_sinks will then extend to make
// sure it fills the entire basin).
// Find the height of "our" side of the pass (the part of it that drains into this
// chunk's lake).
let pass_idx = -indirection[lake_idx] as usize;
let pass_height_i = alt[pass_idx];
// Find the pass this lake is flowing into (i.e. water at the lake bottom gets
// pushed towards the point identified by pass_idx).
let neighbor_pass_idx = dh[pass_idx/*lake_idx*/];
// Find the height of the pass into which our lake is flowing.
let pass_height_j = alt[neighbor_pass_idx as usize];
// Find the maximum of these two heights.
let pass_height = pass_height_i.max(pass_height_j);
// Use the pass height as the initial water altitude.
pass_height
};
// Use the maximum of the pass height and chunk height as the parameter to fill_sinks.
let chunk_alt = alt[chunk_idx];
chunk_alt.max(chunk_water_alt)
};
let water_alt = fill_sinks(water_height_initial, is_ocean_fn);
/* let water_alt = (0..WORLD_SIZE.x * WORLD_SIZE.y)
.into_par_iter()
.map(|posi| water_height_initial(posi))
.collect::<Vec<_>>(); */
let rivers = get_rivers(&water_alt_pos, &water_alt, &dh, &indirection, &flux_rivers);
let water_alt = indirection
.par_iter()
.enumerate()
.map(|(chunk_idx, &indirection_idx)| {
// Find the lake this point is flowing into.
let lake_idx = if indirection_idx < 0 {
chunk_idx
} else {
indirection_idx as usize
};
/* // Find the pass this lake is flowing into (i.e. water at the lake bottom gets
// pushed towards the point identified by pass_idx).
let neighbor_pass_idx = dh[lake_idx]; */
if
/*neighbor_pass_idx*/
dh[lake_idx] < 0 {
// This is either a boundary node (dh[chunk_idx] == -2, i.e. water is at sea level)
// or part of a lake that flows directly into the ocean. In the former case, water
// is at sea level so we just return 0.0. In the latter case, the lake bottom must
// have been a boundary node in the first place--meaning this node flows directly
// into the ocean. In that case, its lake bottom is ocean, meaning its water is
// also at sea level. Thus, we return 0.0 in both cases.
0.0
} else {
// This is not flowing into the ocean, so we can use the existing water_alt.
water_alt[chunk_idx] as f32
}
})
.collect::<Vec<_>>()
.into_boxed_slice();
let is_underwater = |chunk_idx: usize| match rivers[chunk_idx].river_kind {
Some(RiverKind::Ocean) | Some(RiverKind::Lake { .. }) => true,
Some(RiverKind::River { .. }) => false, // TODO: inspect width
None => false,
};
// Check whether any tiles around this tile are not water (since Lerp will ensure that they
// are included).
let pure_water = |posi: usize| {
/* let river_data = &rivers[posi];
match river_data.river_kind {
Some(RiverKind::Lake { .. }) => {
// Lakes are always completely submerged.
return true;
},
/* Some(RiverKind::River { cross_section }) if cross_section.x >= TerrainChunkSize::RECT_SIZE.x as f32 => {
// Rivers that are wide enough are considered completely submerged (not a
// completely fair approximation).
return true;
}, */
_ => {}
} */
let pos = uniform_idx_as_vec2(posi);
for x in pos.x - 1..=pos.x + 1 {
for y in pos.y - 1..=pos.y + 1 {
if x >= 0 && y >= 0 && x < WORLD_SIZE.x as i32 && y < WORLD_SIZE.y as i32 {
let posi = vec2_as_uniform_idx(Vec2::new(x, y));
if !is_underwater(posi) {
return false;
}
}
}
}
true
};
// NaNs in these uniform vectors wherever pure_water() returns true.
let (((alt_no_water, _), (pure_flux, _)), ((temp_base, _), (humid_base, _))) = rayon::join(
|| {
rayon::join(
|| {
uniform_noise(|posi, _| {
if pure_water(posi) {
None
} else {
// A version of alt that is uniform over *non-water* (or land-adjacent water)
// chunks.
Some(alt[posi] as f32)
}
})
},
|| {
uniform_noise(|posi, _| {
if pure_water(posi) {
None
} else {
Some(flux_old[posi])
}
})
},
)
},
|| {
rayon::join(
|| {
uniform_noise(|posi, wposf| {
if pure_water(posi) {
None
} else {
// -1 to 1.
Some(gen_ctx.temp_nz.get((wposf/*.div(12000.0)*/).into_array())
as f32)
}
})
},
|| {
uniform_noise(|posi, wposf| {
// Check whether any tiles around this tile are water.
if pure_water(posi) {
None
} else {
// 0 to 1, hopefully.
Some(
(gen_ctx.humid_nz.get(wposf.div(1024.0).into_array()) as f32)
.add(1.0)
.mul(0.5),
)
}
})
},
)
},
);
let gen_cdf = GenCdf {
humid_base,
temp_base,
chaos,
alt,
basement,
water_alt,
dh,
flux: flux_old,
pure_flux,
alt_no_water,
rivers,
};
let chunks = (0..WORLD_SIZE.x * WORLD_SIZE.y)
.into_par_iter()
.map(|i| SimChunk::generate(i, &gen_ctx, &gen_cdf))
.collect::<Vec<_>>();
let mut this = Self {
seed,
chunks,
locations: Vec::new(),
gen_ctx,
rng,
};
if opts.seed_elements {
this.seed_elements();
}
this
}
/// Draw a map of the world based on chunk information. Returns a buffer of u32s.
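/// The buffer is row-major (index = y * WORLD_SIZE.x + x) with one little-endian RGBA
/// pixel per chunk.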
pub fn get_map(&self) -> Vec<u32> {
let mut v = vec![0u32; WORLD_SIZE.x * WORLD_SIZE.y];
// TODO: Parallelize again.
MapConfig::default().generate(&self, |pos, (r, g, b, a)| {
v[pos.y * WORLD_SIZE.x + pos.x] = u32::from_le_bytes([r, g, b, a]);
});
v
}
/// Prepare the world for simulation
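/// Scatters `loc_count` locations over a coarse cell grid of the map and links each
/// location to its two nearest neighbours.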
pub fn seed_elements(&mut self) {
let mut rng = self.rng.clone();
let random_loc = RandomField::new(rng.gen());
let cell_size = 16;
let grid_size = WORLD_SIZE / cell_size;
let loc_count = 100;
let mut loc_grid = vec![None; grid_size.product()];
let mut locations = Vec::new();
// Seed the world with some locations
(0..loc_count).for_each(|_| {
let cell_pos = Vec2::new(
self.rng.gen::<usize>() % grid_size.x,
self.rng.gen::<usize>() % grid_size.y,
);
let wpos = (cell_pos * cell_size + cell_size / 2)
.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| {
e as i32 * sz as i32 + sz as i32 / 2
});
locations.push(Location::generate(wpos, &mut rng));
loc_grid[cell_pos.y * grid_size.x + cell_pos.x] = Some(locations.len() - 1);
});
// Find neighbours
let mut loc_clone = locations
.iter()
.map(|l| l.center)
.enumerate()
.collect::<Vec<_>>();
(0..locations.len()).for_each(|i| {
let pos = locations[i].center.map(|e| e as i64);
loc_clone.sort_by_key(|(_, l)| l.map(|e| e as i64).distance_squared(pos));
loc_clone.iter().skip(1).take(2).for_each(|(j, _)| {
locations[i].neighbours.insert(*j);
locations[*j].neighbours.insert(i);
});
});
// Simulate invasion!
let invasion_cycles = 25;
(0..invasion_cycles).for_each(|_| {
(0..grid_size.y).for_each(|j| {
(0..grid_size.x).for_each(|i| {
if loc_grid[j * grid_size.x + i].is_none() {
const R_COORDS: [i32; 5] = [-1, 0, 1, 0, -1];
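                        // Consecutive pairs (R_COORDS[idx], R_COORDS[idx + 1]) for idx in 0..4
                        // give the four cardinal neighbour offsets: (-1, 0), (0, 1), (1, 0) and
                        // (0, -1).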
let idx = self.rng.gen::<usize>() % 4;
let new_i = i as i32 + R_COORDS[idx];
let new_j = j as i32 + R_COORDS[idx + 1];
if new_i >= 0 && new_j >= 0 {
let loc = Vec2::new(new_i as usize, new_j as usize);
loc_grid[j * grid_size.x + i] =
loc_grid.get(loc.y * grid_size.x + loc.x).cloned().flatten();
}
}
});
});
});
// Place the locations onto the world
let gen = StructureGen2d::new(self.seed, cell_size as u32, cell_size as u32 / 2);
self.chunks
.par_iter_mut()
.enumerate()
.for_each(|(ij, chunk)| {
let chunk_pos = uniform_idx_as_vec2(ij);
let i = chunk_pos.x as usize;
let j = chunk_pos.y as usize;
let block_pos = Vec2::new(
chunk_pos.x * TerrainChunkSize::RECT_SIZE.x as i32,
chunk_pos.y * TerrainChunkSize::RECT_SIZE.y as i32,
);
let _cell_pos = Vec2::new(i / cell_size, j / cell_size);
// Find the distance to each region
let near = gen.get(chunk_pos);
let mut near = near
.iter()
.map(|(pos, seed)| RegionInfo {
chunk_pos: *pos,
block_pos: pos
.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| e * sz as i32),
dist: (pos - chunk_pos).map(|e| e as f32).magnitude(),
seed: *seed,
})
.collect::<Vec<_>>();
// Sort regions based on distance
near.sort_by(|a, b| a.dist.partial_cmp(&b.dist).unwrap());
let nearest_cell_pos = near[0].chunk_pos;
if nearest_cell_pos.x >= 0 && nearest_cell_pos.y >= 0 {
let nearest_cell_pos = nearest_cell_pos.map(|e| e as usize) / cell_size;
chunk.location = loc_grid
.get(nearest_cell_pos.y * grid_size.x + nearest_cell_pos.x)
.cloned()
.unwrap_or(None)
.map(|loc_idx| LocationInfo { loc_idx, near });
let town_size = 200;
let in_town = chunk
.location
.as_ref()
.map(|l| {
locations[l.loc_idx]
.center
.map(|e| e as i64)
.distance_squared(block_pos.map(|e| e as i64))
< town_size * town_size
})
.unwrap_or(false);
if in_town {
chunk.spawn_rate = 0.0;
}
}
});
// Stage 2 - towns!
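        // Maps a chunk coordinate to the world (block) position of that chunk's centre.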
let chunk_idx_center = |e: Vec2<i32>| {
e.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| {
e * sz as i32 + sz as i32 / 2
})
};
let maybe_towns = self
.gen_ctx
.town_gen
.par_iter(
chunk_idx_center(Vec2::zero()),
chunk_idx_center(WORLD_SIZE.map(|e| e as i32)),
)
.map_init(
|| Box::new(BlockGen::new(ColumnGen::new(self))),
|mut block_gen, (pos, seed)| {
let mut rng = ChaChaRng::from_seed(seed_expan::rng_state(seed));
// println!("Town: {:?}", town);
TownState::generate(pos, &mut block_gen, &mut rng).map(|t| (pos, Arc::new(t)))
},
)
.filter_map(|x| x)
.collect::<HashMap<_, _>>();
let gen_ctx = &self.gen_ctx;
self.chunks
.par_iter_mut()
.enumerate()
.for_each(|(ij, chunk)| {
let chunk_pos = uniform_idx_as_vec2(ij);
let wpos = chunk_idx_center(chunk_pos);
let near_towns = gen_ctx.town_gen.get(wpos);
let town = near_towns
.iter()
.min_by_key(|(pos, _seed)| wpos.distance_squared(*pos));
let maybe_town = town
.and_then(|(pos, _seed)| maybe_towns.get(pos))
// Only care if we're close to the town
.filter(|town| {
Vec2::from(town.center()).distance_squared(wpos)
< town.radius().add(64).pow(2)
})
.cloned();
chunk.structures.town = maybe_town;
});
self.rng = rng;
self.locations = locations;
}
pub fn get(&self, chunk_pos: Vec2<i32>) -> Option<&SimChunk> {
if chunk_pos
.map2(WORLD_SIZE, |e, sz| e >= 0 && e < sz as i32)
.reduce_and()
{
Some(&self.chunks[vec2_as_uniform_idx(chunk_pos)])
} else {
None
}
}
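
    /// Get the chunk containing the given world (block) position, if it is within bounds.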
pub fn get_wpos(&self, wpos: Vec2<i32>) -> Option<&SimChunk> {
self.get(
wpos.map2(Vec2::from(TerrainChunkSize::RECT_SIZE), |e, sz: u32| {
e / sz as i32
}),
)
}
pub fn get_mut(&mut self, chunk_pos: Vec2<i32>) -> Option<&mut SimChunk> {
if chunk_pos
.map2(WORLD_SIZE, |e, sz| e >= 0 && e < sz as i32)
.reduce_and()
{
Some(&mut self.chunks[vec2_as_uniform_idx(chunk_pos)])
} else {
None
}
}
pub fn get_base_z(&self, chunk_pos: Vec2<i32>) -> Option<f32> {
if !chunk_pos
.map2(WORLD_SIZE, |e, sz| e > 0 && e < sz as i32 - 2)
.reduce_and()
{
return None;
}
let chunk_idx = vec2_as_uniform_idx(chunk_pos);
local_cells(chunk_idx)
.flat_map(|neighbor_idx| {
let neighbor_pos = uniform_idx_as_vec2(neighbor_idx);
let neighbor_chunk = self.get(neighbor_pos);
let river_kind = neighbor_chunk.and_then(|c| c.river.river_kind);
let has_water = river_kind.is_some() && river_kind != Some(RiverKind::Ocean);
if (neighbor_pos - chunk_pos).reduce_partial_max() <= 1 || has_water {
neighbor_chunk.map(|c| c.get_base_z())
} else {
None
}
})
.fold(None, |a: Option<f32>, x| a.map(|a| a.min(x)).or(Some(x)))
}
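
    /// Catmull-Rom (cubic) interpolation of a per-chunk value at a world (block) position.
    ///
    /// A usage sketch (illustrative only), sampling altitude:
    ///
    /// ```ignore
    /// let alt = sim.get_interpolated(wpos, |chunk| chunk.alt);
    /// ```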
pub fn get_interpolated<T, F>(&self, pos: Vec2<i32>, mut f: F) -> Option<T>
where
T: Copy + Default + Add<Output = T> + Mul<f32, Output = T>,
F: FnMut(&SimChunk) -> T,
{
let pos = pos.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| {
e as f64 / sz as f64
});
let cubic = |a: T, b: T, c: T, d: T, x: f32| -> T {
let x2 = x * x;
// Catmull-Rom splines
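            // The polynomial below passes through b at x = 0 and c at x = 1, with endpoint
            // slopes (c - a) / 2 and (d - b) / 2.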
let co0 = a * -0.5 + b * 1.5 + c * -1.5 + d * 0.5;
let co1 = a + b * -2.5 + c * 2.0 + d * -0.5;
let co2 = a * -0.5 + c * 0.5;
let co3 = b;
co0 * x2 * x + co1 * x2 + co2 * x + co3
};
let mut x = [T::default(); 4];
for (x_idx, j) in (-1..3).enumerate() {
let y0 = f(self.get(pos.map2(Vec2::new(j, -1), |e, q| e.max(0.0) as i32 + q))?);
let y1 = f(self.get(pos.map2(Vec2::new(j, 0), |e, q| e.max(0.0) as i32 + q))?);
let y2 = f(self.get(pos.map2(Vec2::new(j, 1), |e, q| e.max(0.0) as i32 + q))?);
let y3 = f(self.get(pos.map2(Vec2::new(j, 2), |e, q| e.max(0.0) as i32 + q))?);
x[x_idx] = cubic(y0, y1, y2, y3, pos.y.fract() as f32);
}
Some(cubic(x[0], x[1], x[2], x[3], pos.x.fract() as f32))
}
/// M. Steffen splines.
///
/// A more expensive cubic interpolation function that can preserve monotonicity between
/// points. This is useful if you rely on relative differences between endpoints being
/// preserved at all interior points. For example, we use this with riverbeds (and water
    /// heights along rivers) to maintain the invariant that the rivers always flow downhill at
/// interior points (not just endpoints), without needing to flatten out the river.
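    ///
    /// A usage sketch (illustrative only), sampling water altitude:
    ///
    /// ```ignore
    /// let water = sim.get_interpolated_monotone(wpos, |chunk| chunk.water_alt);
    /// ```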
pub fn get_interpolated_monotone<T, F>(&self, pos: Vec2<i32>, mut f: F) -> Option<T>
where
T: Copy + Default + Signed + Float + Add<Output = T> + Mul<f32, Output = T>,
F: FnMut(&SimChunk) -> T,
{
// See http://articles.adsabs.harvard.edu/cgi-bin/nph-iarticle_query?1990A%26A...239..443S&defaultprint=YES&page_ind=0&filetype=.pdf
//
// Note that these are only guaranteed monotone in one dimension; fortunately, that is
// sufficient for our purposes.
let pos = pos.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| {
e as f64 / sz as f64
});
let secant = |b: T, c: T| c - b;
let parabola = |a: T, c: T| -a * 0.5 + c * 0.5;
let slope = |_a: T, _b: T, _c: T, s_a: T, s_b: T, p_b: T| {
// ((b - a).signum() + (c - b).signum()) * s
(s_a.signum() + s_b.signum()) * (s_a.abs().min(s_b.abs()).min(p_b.abs() * 0.5))
};
let cubic = |a: T, b: T, c: T, d: T, x: f32| -> T {
// Compute secants.
let s_a = secant(a, b);
let s_b = secant(b, c);
let s_c = secant(c, d);
// Computing slopes from parabolas.
let p_b = parabola(a, c);
let p_c = parabola(b, d);
// Get slopes (setting distance between neighbors to 1.0).
let slope_b = slope(a, b, c, s_a, s_b, p_b);
let slope_c = slope(b, c, d, s_b, s_c, p_c);
let x2 = x * x;
// Interpolating splines.
let co0 = slope_b + slope_c - s_b * 2.0;
// = a * -0.5 + c * 0.5 + b * -0.5 + d * 0.5 - 2 * (c - b)
// = a * -0.5 + b * 1.5 - c * 1.5 + d * 0.5;
let co1 = s_b * 3.0 - slope_b * 2.0 - slope_c;
// = (3.0 * (c - b) - 2.0 * (a * -0.5 + c * 0.5) - (b * -0.5 + d * 0.5))
// = a + b * -2.5 + c * 2.0 + d * -0.5;
let co2 = slope_b;
// = a * -0.5 + c * 0.5;
let co3 = b;
co0 * x2 * x + co1 * x2 + co2 * x + co3
};
let mut x = [T::default(); 4];
for (x_idx, j) in (-1..3).enumerate() {
let y0 = f(self.get(pos.map2(Vec2::new(j, -1), |e, q| e.max(0.0) as i32 + q))?);
let y1 = f(self.get(pos.map2(Vec2::new(j, 0), |e, q| e.max(0.0) as i32 + q))?);
let y2 = f(self.get(pos.map2(Vec2::new(j, 1), |e, q| e.max(0.0) as i32 + q))?);
let y3 = f(self.get(pos.map2(Vec2::new(j, 2), |e, q| e.max(0.0) as i32 + q))?);
x[x_idx] = cubic(y0, y1, y2, y3, pos.y.fract() as f32);
}
Some(cubic(x[0], x[1], x[2], x[3], pos.x.fract() as f32))
}
/// Bilinear interpolation.
///
/// Linear interpolation in both directions (i.e. quadratic interpolation).
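    ///
    /// For fractional offsets `(u, v)` inside a chunk, the result is
    /// `y0 * (1 - u) * (1 - v) + y1 * u * (1 - v) + y2 * (1 - u) * v + y3 * u * v`,
    /// where `y0..y3` are the values at the four surrounding chunks.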
pub fn get_interpolated_bilinear<T, F>(&self, pos: Vec2<i32>, mut f: F) -> Option<T>
where
T: Copy + Default + Signed + Float + Add<Output = T> + Mul<f32, Output = T>,
F: FnMut(&SimChunk) -> T,
{
        // (i) Find downhill for all four points.
        // (ii) Compute distance from each downhill point and do linear interpolation on their heights.
        // (iii) Compute distance between each neighboring point and do linear interpolation on
        //       their distance-interpolated heights.
        //
        // In practice, we currently just take the plain bilinear weighted sum of the four
        // surrounding chunk values.
let pos = pos.map2(TerrainChunkSize::RECT_SIZE, |e, sz: u32| {
e as f64 / sz as f64
});
// Orient the chunk in the direction of the most downhill point of the four. If there is
// no "most downhill" point, then we don't care.
let x0 = pos.map2(Vec2::new(0, 0), |e, q| e.max(0.0) as i32 + q);
let p0 = self.get(x0)?;
let y0 = f(p0);
let x1 = pos.map2(Vec2::new(1, 0), |e, q| e.max(0.0) as i32 + q);
let p1 = self.get(x1)?;
let y1 = f(p1);
let x2 = pos.map2(Vec2::new(0, 1), |e, q| e.max(0.0) as i32 + q);
let p2 = self.get(x2)?;
let y2 = f(p2);
let x3 = pos.map2(Vec2::new(1, 1), |e, q| e.max(0.0) as i32 + q);
let p3 = self.get(x3)?;
let y3 = f(p3);
let z0 = y0
.mul(1.0 - pos.x.fract() as f32)
.mul(1.0 - pos.y.fract() as f32);
let z1 = y1.mul(pos.x.fract() as f32).mul(1.0 - pos.y.fract() as f32);
let z2 = y2.mul(1.0 - pos.x.fract() as f32).mul(pos.y.fract() as f32);
let z3 = y3.mul(pos.x.fract() as f32).mul(pos.y.fract() as f32);
Some(z0 + z1 + z2 + z3)
}
}
pub struct SimChunk {
pub chaos: f32,
pub alt: f32,
pub basement: f32,
pub water_alt: f32,
pub downhill: Option<Vec2<i32>>,
pub flux: f32,
pub temp: f32,
pub humidity: f32,
pub rockiness: f32,
pub is_cliffs: bool,
pub near_cliffs: bool,
pub tree_density: f32,
pub forest_kind: ForestKind,
pub spawn_rate: f32,
pub location: Option<LocationInfo>,
pub river: RiverData,
pub is_underwater: bool,
pub structures: Structures,
}
#[derive(Copy, Clone)]
pub struct RegionInfo {
pub chunk_pos: Vec2<i32>,
pub block_pos: Vec2<i32>,
pub dist: f32,
pub seed: u32,
}
#[derive(Clone)]
pub struct LocationInfo {
pub loc_idx: usize,
pub near: Vec<RegionInfo>,
}
#[derive(Clone)]
pub struct Structures {
pub town: Option<Arc<TownState>>,
}
impl SimChunk {
fn generate(posi: usize, gen_ctx: &GenCtx, gen_cdf: &GenCdf) -> Self {
let pos = uniform_idx_as_vec2(posi);
let wposf = (pos * TerrainChunkSize::RECT_SIZE.map(|e| e as i32)).map(|e| e as f64);
let _map_edge_factor = map_edge_factor(posi);
let (_, chaos) = gen_cdf.chaos[posi];
let alt_pre = gen_cdf.alt[posi] as f32;
let basement_pre = gen_cdf.basement[posi] as f32;
let water_alt_pre = gen_cdf.water_alt[posi];
let downhill_pre = gen_cdf.dh[posi];
let flux = gen_cdf.flux[posi] as f32;
let river = gen_cdf.rivers[posi].clone();
// Can have NaNs in non-uniform part where pure_water returned true. We just test one of
// the four in order to find out whether this is the case.
let (flux_uniform, /*flux_non_uniform*/ _) = gen_cdf.pure_flux[posi];
let (alt_uniform, _) = gen_cdf.alt_no_water[posi];
let (temp_uniform, _) = gen_cdf.temp_base[posi];
let (humid_uniform, _) = gen_cdf.humid_base[posi];
/* // Vertical difference from the equator (NOTE: "uniform" with much lower granularity than
// other uniform quantities, but hopefully this doesn't matter *too* much--if it does, we
// can always add a small x component).
//
// Not clear that we want this yet, let's see.
let latitude_uniform = (pos.y as f32 / WORLD_SIZE.y as f32).sub(0.5).mul(2.0);
        // Even less granular--if this matters we can make the sign affect the quantity slightly.
let abs_lat_uniform = latitude_uniform.abs(); */
        // Take the weighted average of our randomly generated base humidity and the calculated
        // water flux over this point in order to compute humidity (the scaled negative-altitude
        // term is currently disabled).
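        // (`cdf_irwin_hall` evaluates the CDF of the weighted sum of independent uniform samples,
        // so the combined score remains approximately uniform on [0, 1].)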
const HUMID_WEIGHTS: [f32; /*3*/2] = [2.0, 1.0/*, 1.0*/];
let humidity = /*if flux_non_uniform.is_nan() {
0.0
} else */{
cdf_irwin_hall(
&HUMID_WEIGHTS,
[humid_uniform, flux_uniform/*, 1.0 - alt_uniform*/],
)
};
        // We also correlate temperature negatively with altitude (the absolute-latitude term is
        // currently disabled), using different weighting than we use for humidity.
const TEMP_WEIGHTS: [f32; 2] = [/*1.5, */ 1.0, 2.0];
let temp = /*if flux_non_uniform.is_nan() {
0.0
} else */{
cdf_irwin_hall(
&TEMP_WEIGHTS,
[
temp_uniform,
1.0 - alt_uniform, /* 1.0 - abs_lat_uniform*/
],
)
}
// Convert to [-1, 1]
.sub(0.5)
.mul(2.0);
/* if (temp - (1.0 - alt_uniform).sub(0.5).mul(2.0)).abs() >= 1e-7 {
panic!("Halp!");
} */
let height_scale = 1.0; // 1.0 / CONFIG.mountain_scale;
let mut alt = CONFIG.sea_level.add(alt_pre.div(height_scale));
let mut basement = CONFIG.sea_level.add(basement_pre.div(height_scale));
let water_alt = CONFIG.sea_level.add(water_alt_pre.div(height_scale));
let downhill = if downhill_pre == -2 {
None
} else if downhill_pre < 0 {
panic!("Uh... shouldn't this never, ever happen?");
} else {
Some(
uniform_idx_as_vec2(downhill_pre as usize)
* TerrainChunkSize::RECT_SIZE.map(|e| e as i32),
)
};
let cliff = gen_ctx.cliff_nz.get((wposf.div(2048.0)).into_array()) as f32 + chaos * 0.2;
        // Logit function (the inverse of the logistic CDF). Make sure x ∈ (0, 1).
let logit = |x: f64| x.ln() - x.neg().ln_1p();
// 0.5 + 0.5 * tanh(ln(1 / (1 - 0.1) - 1) / (2 * (sqrt(3)/pi)))
let logistic_2_base = 3.0f64.sqrt().mul(f64::consts::FRAC_2_PI);
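        // This is 2 * s for the scale s = sqrt(3) / π of a unit-variance logistic distribution,
        // so logistic_cdf(x) = 0.5 + 0.5 * tanh(x / (2 * s)).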
// Assumes μ = 0, σ = 1
let logistic_cdf = |x: f64| x.div(logistic_2_base).tanh().mul(0.5).add(0.5);
let is_underwater = match river.river_kind {
Some(RiverKind::Ocean) | Some(RiverKind::Lake { .. }) => true,
Some(RiverKind::River { .. }) => false, // TODO: inspect width
None => false,
};
let river_xy = Vec2::new(river.velocity.x, river.velocity.y).magnitude();
let river_slope = river.velocity.z / river_xy;
match river.river_kind {
Some(RiverKind::River { cross_section }) => {
if cross_section.x >= 0.5 && cross_section.y >= CONFIG.river_min_height {
/* println!(
"Big area! Pos area: {:?}, River data: {:?}, slope: {:?}",
wposf, river, river_slope
); */
}
if river_slope.abs() >= /*1.0*//*3.0.sqrt() / 3.0*/0.25 && cross_section.x >= 1.0 {
log::debug!(
"Big waterfall! Pos area: {:?}, River data: {:?}, slope: {:?}",
wposf,
river,
river_slope
);
}
}
Some(RiverKind::Lake { .. }) => {
// Forces lakes to be downhill from the land around them, and adds some noise to
// the lake bed to make sure it's not too flat.
let lake_bottom_nz = (gen_ctx.small_nz.get((wposf.div(20.0)).into_array()) as f32)
.max(-1.0)
.min(1.0)
.mul(3.0);
alt = alt.min(water_alt - 5.0) + lake_bottom_nz;
}
_ => {}
}
// No trees in the ocean, with zero humidity (currently), or directly on bedrock.
let tree_density = if is_underwater
/* || alt - basement.min(alt) < 2.0 */
{
0.0
} else {
let tree_density = (gen_ctx.tree_nz.get((wposf.div(1024.0)).into_array()))
.mul(1.5)
.add(1.0)
.mul(0.5)
.mul(1.2 - chaos as f64 * 0.95)
.add(0.05)
.max(0.0)
.min(1.0);
            // Tree density should increase strongly with humidity.
if humidity <= 0.0 || tree_density <= 0.0 {
0.0
} else if humidity >= 1.0 || tree_density >= 1.0 {
1.0
} else {
// Weighted logit sum.
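                // Combine humidity and the tree noise on the log-odds scale (the noise term is
                // down-weighted by 0.5), then map back through the logistic CDF.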
logistic_cdf(logit(humidity as f64) + 0.5 * logit(tree_density))
}
            // Rescale to (0.025, 0.975) so the density never fully saturates at 0 or 1.
.sub(0.5)
.mul(0.95)
.add(0.5)
} as f32;
Self {
chaos,
flux,
alt,
basement: basement.min(alt),
water_alt,
downhill,
temp,
humidity,
rockiness: if true {
(gen_ctx.rock_nz.get((wposf.div(1024.0)).into_array()) as f32)
.sub(0.1)
.mul(1.3)
.max(0.0)
} else {
0.0
},
is_underwater,
is_cliffs: cliff > 0.5 && !is_underwater,
near_cliffs: cliff > 0.2,
tree_density,
forest_kind: if temp > CONFIG.temperate_temp {
if temp > CONFIG.desert_temp {
if humidity > CONFIG.jungle_hum {
// Forests in desert temperatures with extremely high humidity
// should probably be different from palm trees, but we use them
// for now.
ForestKind::Palm
} else if humidity > CONFIG.forest_hum {
ForestKind::Palm
} else if humidity > CONFIG.desert_hum {
// Low but not desert humidity, so we should really have some other
// terrain...
ForestKind::Savannah
} else {
ForestKind::Savannah
}
} else if temp > CONFIG.tropical_temp {
if humidity > CONFIG.jungle_hum {
if tree_density > 0.0 {
// println!("Mangrove: {:?}", wposf);
}
ForestKind::Mangrove
} else if humidity > CONFIG.forest_hum {
// NOTE: Probably the wrong kind of tree for this climate.
ForestKind::Oak
} else if humidity > CONFIG.desert_hum {
// Low but not desert... need something besides savannah.
ForestKind::Savannah
} else {
ForestKind::Savannah
}
} else {
if humidity > CONFIG.jungle_hum {
// Temperate climate with jungle humidity...
// https://en.wikipedia.org/wiki/Humid_subtropical_climates are often
// densely wooded and full of water. Semitropical rainforests, basically.
                        // For now we just treat them like other rainforests.
ForestKind::Oak
} else if humidity > CONFIG.forest_hum {
// Moderate climate, moderate humidity.
ForestKind::Oak
} else if humidity > CONFIG.desert_hum {
// With moderate temperature and low humidity, we should probably see
// something different from savannah, but oh well...
ForestKind::Savannah
} else {
ForestKind::Savannah
}
}
} else {
// For now we don't take humidity into account for cold climates (but we really
// should!) except that we make sure we only have snow pines when there is snow.
if temp <= CONFIG.snow_temp {
ForestKind::SnowPine
} else if humidity > CONFIG.desert_hum {
ForestKind::Pine
} else {
// Should really have something like tundra.
ForestKind::Pine
}
},
spawn_rate: 1.0,
location: None,
river,
structures: Structures { town: None },
}
}
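
    /// Estimated base (lowest) terrain altitude for this chunk: `alt` minus a chaos-scaled
    /// margin.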
pub fn get_base_z(&self) -> f32 {
self.alt - self.chaos * 50.0 - 16.0
}
pub fn get_name(&self, world: &WorldSim) -> Option<String> {
if let Some(loc) = &self.location {
Some(world.locations[loc.loc_idx].name().to_string())
} else {
None
}
}
pub fn get_biome(&self) -> BiomeKind {
if self.alt < CONFIG.sea_level {
BiomeKind::Ocean
} else if self.chaos > 0.6 {
BiomeKind::Mountain
} else if self.temp > CONFIG.desert_temp {
BiomeKind::Desert
} else if self.temp < CONFIG.snow_temp {
BiomeKind::Snowlands
} else if self.tree_density > 0.65 {
BiomeKind::Forest
} else {
BiomeKind::Grassland
}
}
}