Mirror of https://gitlab.com/veloren/veloren.git (synced 2024-08-30 18:12:32 +00:00)
Fixing jungle.
parent 8c644e2bb1
commit 05d501632e
@@ -256,6 +256,11 @@ impl<'a> Sampler for ColumnGen<'a> {
            Rgb::new(0.01, 0.3, 0.0),
            marble,
        );
        let dead_tundra = Lerp::lerp(
            snow,
            Rgb::new(0.05, 0.05, 0.1),
            marble,
        );
        let cliff = Rgb::lerp(cold_stone, warm_stone, marble);

        let grass = Rgb::lerp(cold_grass, warm_grass, marble.powf(1.5).powf(1.0.sub(humidity)));
@@ -264,19 +269,26 @@ impl<'a> Sampler for ColumnGen<'a> {
        let rainforest = Rgb::lerp(wet_grass, warm_grass, marble.powf(1.5).powf(1.0.sub(humidity)));
        let sand = Rgb::lerp(beach_sand, desert_sand, marble);

        let tropical = Rgb::lerp(
            grass,
            Rgb::lerp(
                grass,
                Rgb::new(0.15, 0.2, 0.15),
                marble_small.sub(0.5).mul(0.2).add(0.75).powf(0.667).powf(1.0.sub(humidity))
            ),
            Rgb::new(0.87, 0.62, 0.56),
            marble_small.sub(0.5).mul(0.2).add(0.75).powf(0.667).powf(1.0.sub(humidity)),
            marble.powf(1.5).sub(0.5).mul(256.0)
        );

        // For below desert humidity, we are always sand or rock, depending on altitude and
        // temperature.
        let ground = Rgb::lerp(
            Rgb::lerp(
                dead_tundra,
                sand,
                dirt,
                temp.sub(CONFIG.desert_temp).mul(0.5)
                temp.sub(CONFIG.snow_temp)
                    .div(CONFIG.desert_temp.sub(CONFIG.snow_temp))
                    .mul(0.5)
            ),
            cliff,
            alt.sub(CONFIG.mountain_scale * 0.25).div(CONFIG.mountain_scale * 0.125)
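The ground colour above is built from nested linear interpolations keyed off the marble noise, humidity, and altitude. A minimal self-contained sketch of that nested-lerp pattern, using a simplified stand-in `Rgb` rather than Veloren's actual `vek` types:

```rust
// Minimal sketch of the nested-lerp colour blending used in the column sampler.
// `Rgb` and `lerp` here are simplified stand-ins, not Veloren's vek types.
#[derive(Clone, Copy, Debug)]
struct Rgb { r: f32, g: f32, b: f32 }

impl Rgb {
    fn new(r: f32, g: f32, b: f32) -> Self { Rgb { r, g, b } }
    // Component-wise linear interpolation: factor 0.0 -> a, 1.0 -> b.
    fn lerp(a: Rgb, b: Rgb, t: f32) -> Rgb {
        let t = t.max(0.0).min(1.0);
        Rgb::new(
            a.r + (b.r - a.r) * t,
            a.g + (b.g - a.g) * t,
            a.b + (b.b - a.b) * t,
        )
    }
}

fn main() {
    let grass = Rgb::new(0.1, 0.5, 0.1);
    let sand = Rgb::new(0.8, 0.7, 0.4);
    let cliff = Rgb::new(0.4, 0.4, 0.4);
    // Blend grass toward sand by a temperature factor, then toward cliff by an
    // altitude factor, mirroring the shape of the `ground` computation above.
    let by_temp = Rgb::lerp(grass, sand, 0.3);
    let ground = Rgb::lerp(by_temp, cliff, 0.6);
    println!("{:?}", ground);
}
```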
@@ -12,10 +12,10 @@ pub struct Config {
pub const CONFIG: Config = Config {
    sea_level: 140.0,
    mountain_scale: 1000.0,
    snow_temp: -0.4,
    snow_temp: -0.6,
    tropical_temp: 0.2,
    desert_temp: 0.45,
    desert_hum: 0.2,
    desert_temp: 0.6,
    desert_hum: 0.15,
    forest_hum: 0.5,
    jungle_hum: 0.8,
    jungle_hum: 0.85,
};
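These constants act as thresholds on the uniformized temperature and humidity fields used further down. A hypothetical classifier (not the engine's actual selection logic, which is spread across the sim and column sampler) illustrates how the tweaked values partition (temperature, humidity) space:

```rust
// Hypothetical illustration of how the tweaked thresholds partition
// (temperature, humidity) space; the real selection logic lives elsewhere
// in the sim and column sampling code.
const SNOW_TEMP: f32 = -0.6;
const TROPICAL_TEMP: f32 = 0.2;
const DESERT_TEMP: f32 = 0.6;
const DESERT_HUM: f32 = 0.15;
const FOREST_HUM: f32 = 0.5;
const JUNGLE_HUM: f32 = 0.85;

fn describe(temp: f32, humidity: f32) -> &'static str {
    if temp < SNOW_TEMP {
        "snowy"
    } else if humidity < DESERT_HUM || temp > DESERT_TEMP {
        "desert-like"
    } else if temp > TROPICAL_TEMP && humidity > JUNGLE_HUM {
        "jungle"
    } else if humidity > FOREST_HUM {
        "forest"
    } else {
        "grassland"
    }
}

fn main() {
    // Raising jungle_hum to 0.85 makes jungles rarer; lowering desert_hum
    // to 0.15 makes true deserts rarer as well.
    println!("{}", describe(0.3, 0.9)); // jungle
    println!("{}", describe(0.3, 0.1)); // desert-like
}
```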
@@ -1,5 +1,10 @@
#![deny(unsafe_code)]
#![feature(const_generics, euclidean_division, bind_by_move_pattern_guards, option_flattening)]
#![feature(box_syntax,
    const_generics,
    euclidean_division,
    bind_by_move_pattern_guards,
    option_flattening,
)]

mod all;
mod block;
@@ -39,27 +39,171 @@ pub const WORLD_SIZE: Vec2<usize> = Vec2 { x: 1024, y: 1024 };
/// If the precondition is met, the result of calling this function will be uniformly distributed
/// while preserving the same information that was in the original average.
///
/// NOTE: For N > 33 the function will no longer return correct results since we will overflow u32.
/// For N > 33 the function will no longer return correct results since we will overflow u32.
///
/// NOTE:
///
/// Per [1], the problem of determining the CDF of
/// the sum of uniformly distributed random variables over *different* ranges is considerably more
/// complicated than it is for the same-range case. Fortunately, it also provides a reference to
/// [2], which contains a complete derivation of an exact rule for the density function for
/// this case. The CDF is just the integral of the density function [3],
/// which we use to convert this into a CDF formula.
///
/// This allows us to sum weighted, uniform, independent random variables.
///
/// At some point, we should probably contribute this back to stats-rs.
///
/// 1. https://www.r-bloggers.com/sums-of-random-variables/,
/// 2. Sadooghi-Alvandi, S., A. Nematollahi, & R. Habibi, 2009.
///    On the Distribution of the Sum of Independent Uniform Random Variables.
///    Statistical Papers, 50, 171-175.
/// 3. https://en.wikipedia.org/wiki/Cumulative_distribution_function
fn cdf_irwin_hall<const N : usize>(weights: &[f32; N], samples: [f32; N]) -> f32 {
    // Take the average of the weights
    // (to scale the weights down so their sum is in the (0..=N) range).
    let avg = weights.iter().sum::<f32>() / N as f32;
    // Take the sum.
    // Let J_k = {(j_1, ... , j_k) : 1 ≤ j_1 < j_2 < ··· < j_k ≤ N }.
    //
    // Let A_N = Π{k = 1 to n}a_k.
    //
    // The density function for N ≥ 2 is:
    //
    //   1/(A_N * (N - 1)!) * (x^(N-1) + Σ{k = 1 to N}((-1)^k *
    //   Σ{(j_1, ..., j_k) ∈ J_k}(max(0, x - Σ{l = 1 to k}(a_(j_l)))^(N - 1))))
    //
    // So the cumulative distribution function is its integral, i.e. (I think)
    //
    //   1/(product{k in A}(k) * N!) * (x^N + sum(k in 1 to N)((-1)^k *
    //   sum{j in Subsets[A, {k}]}(max(0, x - sum{l in j}(l))^N)))
    //
    // which is also equivalent to
    //
    //   (letting B_k = { a in Subsets[A, {k}] : sum {l in a} l }, B_(0,1) = 0 and
    //    H_k = { i : 1 ≤ i ≤ N! / (k! * (N - k)!) })
    //
    //   1/(product{k in A}(k) * N!) * sum(k in 0 to N)((-1)^k *
    //   sum{l in H_k}(max(0, x - B_(k,l))^N))
    //
    // We should be able to iterate through the whole power set
    // instead, and figure out K by calling count_ones(), so we can compute the result in O(2^N)
    // iterations.
    let x : f32 =
        weights.iter().zip(samples.iter()).map(|(weight, sample)| weight / avg * sample).sum();
    // CDF = 1 / N! * Σ{k = 0 to floor(x)} ((-1)^k (N choose k) (x - k) ^ N)
    let mut binom = 1; // (-1)^0 * (n choose 0) = 1 * 1 = 1
    let mut y = x.powi(N as i32); // 1 * (x - 0)^N = x^N
    // 1..floor(x)
    for k in (1..=x.floor() as i32) {
        // (-1)^k (N choose k) = ((-1)^(k-1) (N choose (k - 1))) * -(N + 1 - k) / k for k ≥ 1.
        binom *= -(N as i32 + 1 - k) / k;
        y += binom as f32 * (x - k as f32).powi(N as i32);
        weights.iter().zip(samples.iter()).map(|(weight, sample)| weight * sample).sum();

    let mut y = 0.0f32;
    for subset in (0u32..(1 << N)) {
        // Number of set elements
        let k = subset.count_ones();
        // Add together exactly the set elements to get B_subset
        let z = weights.iter()
            .enumerate()
            .filter( |(i, _)| subset & (1 << i) as u32 != 0)
            .map(|(_, k)| k)
            .sum::<f32>();
        // Compute max(0, x - B_subset)^N
        let z = (x - z).max(0.0).powi(N as i32);
        // The parity of k determines whether the sum is negated.
        y += if k & 1 == 0 { z } else { -z };
    }

    // Divide by the product of the weights.
    y /= weights.iter().product::<f32>();

    // Remember to multiply by 1 / N! at the end.
    y / (1..=N as i32).product::<i32>() as f32
}
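For anyone verifying the power-set formula above, here is a standalone sketch of the same weighted Irwin-Hall CDF written with a runtime `n` instead of const generics, checked against a value that is easy to derive by hand: for two unit-weight uniforms, P(X₁ + X₂ ≤ 1.5) = 1 − 0.5²/2 = 0.875.

```rust
// Standalone sketch of the weighted Irwin-Hall CDF. For weights a_k and
// x = Σ a_k * s_k:
//   F(x) = 1 / (Π a_k * N!) * Σ_{S ⊆ {1..N}} (-1)^|S| * max(0, x - Σ_{k∈S} a_k)^N
fn cdf_irwin_hall(weights: &[f32], x: f32) -> f32 {
    let n = weights.len();
    let mut y = 0.0f32;
    for subset in 0u32..(1 << n) {
        // Sum of the weights selected by this subset (B_subset).
        let b: f32 = weights
            .iter()
            .enumerate()
            .filter(|(i, _)| subset & (1 << i) != 0)
            .map(|(_, w)| *w)
            .sum();
        let term = (x - b).max(0.0).powi(n as i32);
        // The sign alternates with the subset's cardinality.
        y += if subset.count_ones() % 2 == 0 { term } else { -term };
    }
    let n_factorial = (1..=n as i32).product::<i32>() as f32;
    y / (weights.iter().product::<f32>() * n_factorial)
}

fn main() {
    // Two independent U(0, 1) variables: P(X1 + X2 <= 1.5) = 1 - 0.5^2 / 2 = 0.875.
    let p = cdf_irwin_hall(&[1.0, 1.0], 1.5);
    assert!((p - 0.875).abs() < 1e-6);
    println!("CDF(1.5) = {}", p);
}
```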
/// First component of each element of the vector is the computed CDF of the noise function at this
/// index (i.e. its position in a sorted list of values returned by the noise function applied to
/// every chunk in the game). Second component is the cached value of the noise function that
/// generated the index.
type InverseCdf = Box<[(f32, f32); WORLD_SIZE.x * WORLD_SIZE.y]>;

/// Computes the position Vec2 of a SimChunk from an index, where the index was generated by
/// uniform_noise.
fn uniform_idx_as_vec2(idx: usize) -> Vec2<i32> {
    Vec2::new((idx / WORLD_SIZE.x) as i32, (idx % WORLD_SIZE.x) as i32)
}

/// Compute inverse cumulative distribution function for arbitrary function f, the hard way. We
/// pre-generate noise values prior to worldgen, then sort them in order to determine the correct
/// position in the sorted order. That lets us use `(index + 1) / (WORLDSIZE.y * WORLDSIZE.x)` as
/// a uniformly distributed (from almost-0 to 1) regularization of the chunks. That is, if we
/// apply the computed "function" F⁻¹(x, y) to (x, y) and get out p, it means that approximately
/// (100 * p)% of chunks have a lower value for F⁻¹ than p. The main purpose of doing this is to
/// make sure we are using the entire range we want, and to allow us to apply the numerous results
/// about distributions on uniform functions to the procedural noise we generate, which lets us
/// much more reliably control the *number* of features in the world while still letting us play
/// with the *shape* of those features, without having arbitrary cutoff points / discontinuities
/// (which tend to produce ugly-looking / unnatural terrain).
///
/// As a concrete example, before doing this it was very hard to tweak humidity so that either most
/// of the world wasn't dry, or most of it wasn't wet, by combining the billow noise function and
/// the computed altitude. This is because the billow noise function has a very unusual
/// distribution that is heavily skewed towards 0. By correcting for this tendency, we can start
/// with uniformly distributed billow noise and altitudes and combine them to get uniformly
/// distributed humidity, while still preserving the existing shapes that the billow noise and
/// altitude functions produce.
///
/// f takes an index, which represents the index corresponding to this chunk in any SimChunk
/// vector returned by uniform_noise, and (for convenience) the float-translated version of those
/// coordinates.
/// f should return a value with no NaNs. If there is a NaN, it will panic. There are no other
/// conditions on f.
///
/// Returns a vec of (f32, f32) pairs consisting of the percentage of chunks with a value lower than
/// this one, and the actual noise value (we don't need to cache it, but it makes ensuring that
/// subsequent code that needs the noise value actually uses the same one we were using here
/// easier).
fn uniform_noise(f: impl Fn(usize, Vec2<f64>) -> f32) -> InverseCdf {
    let mut noise = (0..WORLD_SIZE.x * WORLD_SIZE.y)
        .map(|i| (
            i,
            f(i,
              (uniform_idx_as_vec2(i) * TerrainChunkSize::SIZE.map(|e| e as i32)).map(|e| e as f64))
        ))
        .collect::<Vec<_>>();

    // sort_unstable_by is equivalent to sort_by here since we include the index in the
    // comparison. We could leave out the index, but this might make the order not
    // reproduce the same way between different versions of Rust (for example).
    noise.sort_unstable_by(|f, g| (f.1, f.0).partial_cmp(&(g.1, g.0)).unwrap());

    // Construct a vector that associates each chunk position with the 1-indexed
    // position of the noise in the sorted vector (divided by the vector length).
    // This guarantees a uniform distribution among the samples.
    let mut uniform_noise = box [(0.0, 0.0); WORLD_SIZE.x * WORLD_SIZE.y];
    let total = (WORLD_SIZE.x * WORLD_SIZE.y) as f32;
    for (noise_idx, (chunk_idx, noise_val)) in noise.into_iter().enumerate() {
        uniform_noise[chunk_idx] = ((1 + noise_idx) as f32 / total, noise_val);
    }
    uniform_noise
}
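The core trick in `uniform_noise` is plain rank normalization: sort the sampled values and replace each value with its 1-indexed rank divided by the total count. A small sketch of that idea on a plain slice, independent of the world-size constants and `box` syntax above:

```rust
// Rank-normalize a slice of noise samples: each output is in (0, 1] and the
// outputs are (up to ties) uniformly distributed regardless of the input
// distribution. This mirrors what `uniform_noise` does per chunk.
fn rank_uniformize(samples: &[f32]) -> Vec<(f32, f32)> {
    let mut order: Vec<usize> = (0..samples.len()).collect();
    // Include the index as a tie-breaker, like the sort in `uniform_noise`.
    order.sort_unstable_by(|&a, &b| {
        (samples[a], a).partial_cmp(&(samples[b], b)).unwrap()
    });

    let total = samples.len() as f32;
    let mut out = vec![(0.0, 0.0); samples.len()];
    for (rank, &idx) in order.iter().enumerate() {
        out[idx] = ((rank + 1) as f32 / total, samples[idx]);
    }
    out
}

fn main() {
    // A heavily skewed input still yields evenly spaced quantiles.
    let skewed = [0.01, 0.02, 0.03, 0.9];
    println!("{:?}", rank_uniformize(&skewed));
    // [(0.25, 0.01), (0.5, 0.02), (0.75, 0.03), (1.0, 0.9)]
}
```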
/// Calculates the smallest distance along an axis (x, y) from an edge of
/// the world. This value is maximal at WORLD_SIZE / 2 and minimized at the extremes
/// (0 or WORLD_SIZE on one or more axes). It then divides the quantity by cell_size,
/// so the final result is 1 when we are not in a cell along the edge of the world, and
/// ranges between 0 and 1 otherwise (lower when the chunk is closer to the edge).
fn map_edge_factor(posi: usize) -> f32 {
    uniform_idx_as_vec2(posi)
        .map2(WORLD_SIZE.map(|e| e as i32), |e, sz| {
            (sz / 2 - (e - sz / 2).abs()) as f32 / 16.0
        })
        .reduce_partial_min()
        .max(0.0)
        .min(1.0)
}
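For intuition, with WORLD_SIZE = 1024 this factor only drops below 1 within 16 chunks of the border; a chunk 8 chunks in from the western edge gets (512 − |8 − 512|) / 16 = 0.5 on the x axis. A hedged sketch of the same per-axis arithmetic (clamping per axis here, which gives the same minimum as the clamp-after-min in the real function):

```rust
// Sketch of the per-axis edge falloff used by `map_edge_factor`:
// distance from the nearest edge along one axis, divided by 16 chunks,
// clamped to [0, 1].
fn edge_factor_1d(e: i32, sz: i32) -> f32 {
    ((sz / 2 - (e - sz / 2).abs()) as f32 / 16.0).max(0.0).min(1.0)
}

fn main() {
    let sz = 1024;
    assert_eq!(edge_factor_1d(0, sz), 0.0);   // on the edge
    assert_eq!(edge_factor_1d(8, sz), 0.5);   // 8 chunks in
    assert_eq!(edge_factor_1d(512, sz), 1.0); // centre of the map
    // The full factor is the minimum over both axes.
    let factor = edge_factor_1d(8, sz).min(edge_factor_1d(512, sz));
    println!("{}", factor); // 0.5
}
```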
struct GenCdf {
    hill: InverseCdf,
    humid_base: InverseCdf,
    // humid_small: InverseCdf,
    temp_base: InverseCdf,
    alt_base: InverseCdf,
    chaos_pre: InverseCdf,
    alt_main: InverseCdf,
    alt_pre: InverseCdf,
}

pub(crate) struct GenCtx {
    pub turb_x_nz: SuperSimplex,
    pub turb_y_nz: SuperSimplex,
@@ -137,11 +281,116 @@ impl WorldSim {
                .set_seed(gen_seed()),
        };

        // From 0 to 1.6, but the distribution before the max is from -1 to 1, so there is a 50%
        // chance that hill will end up at 0.
        let hill = uniform_noise(|_, wposf| (0.0
            + gen_ctx
                .hill_nz
                .get((wposf.div(1_500.0)).into_array())
                .mul(1.0) as f32
            + gen_ctx
                .hill_nz
                .get((wposf.div(500.0)).into_array())
                .mul(0.3) as f32)
            .add(0.3)
            .max(0.0));

        // 0 to 1, hopefully.
        let humid_base = uniform_noise(
            |_, wposf| (gen_ctx.humid_nz.get(wposf.div(1024.0).into_array()) as f32)
                .add(1.0)
                .mul(0.5));

        /* // Small amount of uniform noise to add to humidity.
        let humid_small = uniform_noise(
            |_, wposf| (gen_ctx.small_nz.get((wposf.div(500.0)).into_array()) as f32)
                .mul(1.0)
                .add(0.5)); */

        // -1 to 1.
        let temp_base = uniform_noise(
            |_, wposf| (gen_ctx.temp_nz.get((wposf.div(12000.0)).into_array()) as f32)
        );

        // "Base" of the chunk, to be multiplied by CONFIG.mountain_scale (multiplied value is
        // from -0.25 * (CONFIG.mountain_scale * 1.1) to 0.25 * (CONFIG.mountain_scale * 0.9),
        // but value here is from -0.275 to 0.225).
        let alt_base = uniform_noise(
            |_, wposf| (gen_ctx.alt_nz.get((wposf.div(12_000.0)).into_array()) as f32)
                .sub(0.1)
                .mul(0.25));

        // chaos produces a value in [0.1, 1.24]. It is a meta-level factor intended to reflect how
        // "chaotic" the region is--how much weird stuff is going on on this terrain.
        //
        // First, we calculate chaos_pre, which is chaos with no filter and no temperature
        // flattening (so it is between [0, 1.24] instead of [0.1, 1.24]). This is used to break
        // the cyclic dependency between temperature and altitude (altitude relies on chaos, which
        // relies on temperature, but we also want temperature to rely on altitude. We recompute
        // altitude with the temperature incorporated after we figure out temperature).
        let chaos_pre = uniform_noise(
            |posi, wposf| (gen_ctx.chaos_nz.get((wposf.div(3_000.0)).into_array()) as f32)
                .add(1.0)
                .mul(0.5)
                // [0, 1] * [0.25, 1] = [0, 1] (but probably towards the lower end)
                .mul(
                    (gen_ctx.chaos_nz.get((wposf.div(6_000.0)).into_array()) as f32)
                        .abs()
                        .max(0.25)
                        .min(1.0),
                )
                // Chaos is always increased by a little when we're on a hill (but remember that
                // hill is 0 about 50% of the time).
                // [0, 1] + 0.15 * [0, 1.6] = [0, 1.24]
                .add(0.15 * hill[posi].1));

        // This is the extension upwards from the base added to some extra noise from -1 to 1.
        // The extra noise is multiplied by alt_main (the mountain part of the extension) clamped to
        // be between 0.25 and 1, and made 60% larger (so the extra noise is between -1.6 and 1.6,
        // and the final noise is never more than 160% or less than 40% of the original noise,
        // depending on altitude).
        // Adding this to alt_main thus yields a value between -0.4 (if alt_main = 0 and
        // gen_ctx = -1) and 2.6 (if alt_main = 1 and gen_ctx = 1). When the generated small_nz
        // value hits -0.625 the value crosses 0, so most of the points are above 0.
        //
        // Then, we add 1 and divide by 2 to get a value between 0.3 and 1.8.
        let alt_main = uniform_noise(|posi, wposf| {
            // Extension upwards from the base. A positive number from 0 to 1 curved to be maximal
            // at 0. Also to be multiplied by CONFIG.mountain_scale.
            let alt_main = (gen_ctx.alt_nz.get((wposf.div(2_000.0)).into_array()) as f32)
                .abs()
                .powf(1.35);

            (0.0
                + alt_main
                + (gen_ctx.small_nz.get((wposf.div(300.0)).into_array()) as f32)
                    .mul(alt_main.max(0.25))
                    .mul(0.16))
                .add(1.0)
                .mul(0.5)
        });

        // We ignore sea level because we actually want to be relative to sea level here and want
        // things in CONFIG.mountain_scale units, and we are using the version of chaos that doesn't
        // know about temperature. Otherwise, this is a correct altitude calculation.
        let alt_pre = uniform_noise(|posi, _|
            (alt_base[posi].1 + alt_main[posi].1.mul(chaos_pre[posi].1.max(0.1)))
                .mul(map_edge_factor(posi)));

        let gen_cdf = GenCdf {
            hill,
            humid_base,
            // humid_small,
            temp_base,
            alt_base,
            chaos_pre,
            alt_main,
            alt_pre,
        };

        let mut chunks = Vec::new();
        for x in 0..WORLD_SIZE.x as i32 {
            for y in 0..WORLD_SIZE.y as i32 {
                chunks.push(SimChunk::generate(Vec2::new(x, y), &mut gen_ctx));
            }
        for i in 0..WORLD_SIZE.x * WORLD_SIZE.y {
            chunks.push(SimChunk::generate(i, &mut gen_ctx, &gen_cdf));
        }

        let mut this = Self {
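Each of the fields above follows the same pattern: sample one or two octaves of noise, rescale into a known interval, and only then feed the result through `uniform_noise`. A hedged sketch of the `hill` construction with a stand-in noise function (not the `noise` crate):

```rust
// Sketch of the `hill` construction: two octaves of noise in roughly
// [-1.3, 1.3], shifted by +0.3 and clamped at zero, so about half of all
// samples end up exactly 0 and the rest spread over (0, 1.6].
fn hill(noise: impl Fn(f64, f64) -> f64, x: f64, y: f64) -> f32 {
    let coarse = noise(x / 1_500.0, y / 1_500.0) as f32;   // roughly [-1, 1]
    let fine = (noise(x / 500.0, y / 500.0) as f32) * 0.3; // roughly [-0.3, 0.3]
    (coarse + fine + 0.3).max(0.0)
}

fn main() {
    // Stand-in "noise": a smooth, deterministic function bounded by [-1, 1].
    let fake_noise = |x: f64, y: f64| (x * 12.9898 + y * 78.233).sin();
    println!("{}", hill(fake_noise, 10_000.0, 20_000.0));
}
```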
@@ -378,22 +627,11 @@ pub struct LocationInfo {
}

impl SimChunk {
    fn generate(pos: Vec2<i32>, gen_ctx: &mut GenCtx) -> Self {
    fn generate(posi: usize, gen_ctx: &mut GenCtx, gen_cdf: &GenCdf) -> Self {
        let pos = uniform_idx_as_vec2(posi);
        let wposf = (pos * TerrainChunkSize::SIZE.map(|e| e as i32)).map(|e| e as f64);

        // From 0 to 1.6, but the distribution before the max is from -1 to 1, so there is a 50%
        // chance that hill will end up at 0.
        let hill = (0.0
            + gen_ctx
                .hill_nz
                .get((wposf.div(1_500.0)).into_array())
                .mul(1.0) as f32
            + gen_ctx
                .hill_nz
                .get((wposf.div(500.0)).into_array())
                .mul(0.3) as f32)
            .add(0.3)
            .max(0.0);
        let (_, hill) = gen_cdf.hill[posi];

        // FIXME: Currently unused, but should represent fresh groundwater level.
        // Should be correlated a little with humidity, somewhat negatively with altitude,
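The new `generate` takes a flat chunk index and recovers the position with `uniform_idx_as_vec2`. A small sketch of the index/position round trip, with a hypothetical inverse helper added only for illustration:

```rust
const WORLD_SIZE_X: usize = 1024;

// Mirrors `uniform_idx_as_vec2` above: x = idx / WORLD_SIZE.x, y = idx % WORLD_SIZE.x.
fn uniform_idx_as_vec2(idx: usize) -> (i32, i32) {
    ((idx / WORLD_SIZE_X) as i32, (idx % WORLD_SIZE_X) as i32)
}

// Hypothetical inverse, not part of the diff; shown only for the round trip.
fn vec2_as_uniform_idx(x: i32, y: i32) -> usize {
    x as usize * WORLD_SIZE_X + y as usize
}

fn main() {
    let idx = 1023 * 1024 + 7;
    let (x, y) = uniform_idx_as_vec2(idx);
    assert_eq!((x, y), (1023, 7));
    assert_eq!(vec2_as_uniform_idx(x, y), idx);
}
```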
@@ -411,199 +649,38 @@ impl SimChunk {
                .into_array(),
        ) as f32;

        // "Base" of the chunk, to be multiplied by CONFIG.mountain_scale (multiplied value is
        // from -0.25 * (CONFIG.mountain_scale * 1.1) to 0.25 * (CONFIG.mountain_scale * 0.9),
        // but value here is from -0.275 to 0.225).
        let alt_base_pre = (gen_ctx.alt_nz.get((wposf.div(12_000.0)).into_array()) as f32);

        let alt_base = alt_base_pre
            .sub(0.1)
            .mul(0.25);

        // Extension upwards from the base. A positive number from 0 to 1 curved to be maximal at
        // 0.
        let alt_main_pre = (gen_ctx.alt_nz.get((wposf.div(2_000.0)).into_array()) as f32);
        let alt_main = alt_main_pre
            .abs()
            .powf(1.35);

        // Calculates the smallest distance along an axis (x, y) from an edge of
        // the world. This value is maximal at WORLD_SIZE / 2 and minimized at the extremes
        // (0 or WORLD_SIZE on one or more axes). It then divides the quantity by cell_size,
        // so the final result is 1 when we are not in a cell along the edge of the world, and
        // ranges between 0 and 1 otherwise (lower when the chunk is closer to the edge).
        let map_edge_factor = pos
            .map2(WORLD_SIZE.map(|e| e as i32), |e, sz| {
                (sz / 2 - (e - sz / 2).abs()) as f32 / 16.0
            })
            .reduce_partial_min()
            .max(0.0)
            .min(1.0);

        // chaos produces a value in [0.1, 1.24]. It is a meta-level factor intended to reflect how
        // "chaotic" the region is--how much weird stuff is going on on this terrain.
        //
        // First, we calculate chaos_pre, which is chaos with no filter and no temperature
        // flattening (so it is between [0, 1.24] instead of [0.1, 1.24]). This is used to break
        // the cyclic dependency between temperature and altitude (altitude relies on chaos, which
        // relies on temperature, but we also want temperature to rely on altitude. We recompute
        // altitude with the temperature incorporated after we figure out temperature).
        let chaos_pre = (gen_ctx.chaos_nz.get((wposf.div(3_000.0)).into_array()) as f32)
            .add(1.0)
            .mul(0.5)
            // [0, 1] * [0.25, 1] = [0, 1] (but probably towards the lower end)
            .mul(
                (gen_ctx.chaos_nz.get((wposf.div(6_000.0)).into_array()) as f32)
                    .abs()
                    .max(0.25)
                    .min(1.0),
            )
            // Chaos is always increased by a little when we're on a hill (but remember that hill
            // is 0 about 50% of the time).
            // [0, 1] + 0.15 * [0, 1.6] = [0, 1.24]
            .add(0.15 * hill);

        // This is the extension upwards from the base added to some extra noise from -1 to 1.
        // The extra noise is multiplied by alt_main (the base part of the extension) clamped to
        // be between 0.25 and 1, and made 60% larger (so the extra noise is between -1.6 and 1.6,
        // and the final noise is never more than 160% or less than 40% of the original noise,
        // depending on altitude).
        // Adding this to alt_main thus yields a value between -0.4 (if alt_main = 0 and
        // gen_ctx = -1) and 2.6 (if alt_main = 1 and gen_ctx = 1). When the generated small_nz
        // value hits -0.625 the value crosses 0, so most of the points are above 0.
        //
        // Then, we add 1 and divide by 2 to get a value between 0.3 and 1.8.
        let alt_pre = (0.0
            + alt_main
            + (gen_ctx.small_nz.get((wposf.div(300.0)).into_array()) as f32)
                .mul(alt_main.max(0.25))
                .mul(1.6))
            .add(1.0)
            .mul(0.5);

        // 0 to 1, hopefully.
        let humid_base =
            (gen_ctx.humid_nz.get(wposf.div(1024.0).into_array()) as f32)
                .add(1.0)
                .mul(0.5)
                as f32;

        // Ideally, humidity is correlated negatively with altitude and slightly positively with
        // dryness. For now we just do "negatively with altitude." We currently opt not to have
        // it affected by temperature. Negative humidity is lower, positive humidity is higher.
        //
        // Because we want to start at 0, rise, and then saturate at 1, we use a cumulative logistic
        // distribution, calculated as:
        //
        //   1/2 + 1/2 * tanh((x - μ) / (2s))
        //
        // where x is the random variable (altitude relative to sea level without mountain
        // scaling), μ is the altitude where humidity should be at its midpoint (currently set to 0.125),
        // and s is the scale parameter proportional to the standard deviation σ of the humidity
        // function of altitude (s = √3/π * σ). Currently we set σ to -0.0625, so we get ~ 68% of
        // the variation due to altitude between .0625 * mountain_scale above sea level and
        // 0.1875 * mountain_scale above sea level (it is negative to make the distribution higher when
        // the altitude is lower).
        let humid_alt_sigma = -0.0625;
        let humid_alt_2s = 3.0f32.sqrt().mul(f32::consts::FRAC_2_PI).mul(humid_alt_sigma);
        let humid_alt_mu = 0.125;
        // We ignore sea level because we actually want to be relative to sea level here and want
        // things in CONFIG.mountain_scale units, and we are using the version of chaos that doesn't
        // know about temperature. Otherwise, this is a correct altitude calculation.
        let humid_alt_pre = (alt_base + alt_pre.mul(chaos_pre.max(0.1))) * map_edge_factor;
        let humid_alt = humid_alt_pre
            .sub(humid_alt_mu)
            .div(humid_alt_2s)
            .tanh()
            .mul(0.5)
            .add(0.5);

        // The log-logistic distribution (a variable whose logarithm has a logistic distribution) is often
        // used to model stream flow rates and precipitation as a tractable analogue of a log-normal
        // distribution. We use it here for humidity.
        //
        // Specifically, we treat altitude
        //
        // For a log-logistic distribution, you have
        //
        //   X = e^
        //
        // where α is a scale parameter (the median of the distribution, where μ = ln(α)), β is a
        // shape parameter related to the standard deviation (s = 1 / β)
        //
        // Start with e^(altitude difference) to get values in (0, 1) for low altitudes (-∞, e) and
        // in [1, ∞) for high altitudes [e, ∞).
        //
        // The produced variable is in a log-normal distribution (that is, X's *logarithm* is
        // normally distributed).
        //
        // https://en.wikipedia.org/wiki/Log-logistic_distribution
        //
        // A log-logistic distribution represents the probability distribution of a random variable
        // whose logarithm has a logistic distribution.
        //
        // That is, ln X varies smoothly from 0 to 1 along an S-curve.
        //
        // Now we can
        //
        // 1 to
        // for high.
        // We want negative values for altitude to represent
        //
        // e^-2
        //
        // (alt mag)^(climate mag)
        //
        // (2)^(-1)
        //
        let (_, alt_base) = gen_cdf.alt_base[posi];
        let map_edge_factor = map_edge_factor(posi);
        let (_, chaos_pre) = gen_cdf.chaos_pre[posi];
        let (_, alt_pre) = gen_cdf.alt_main[posi];
        let (humid_base, _) = gen_cdf.humid_base[posi];
        let (alt_uniform, _) = gen_cdf.alt_pre[posi];
        // let (humid_small, _) = gen_cdf.humid_small[posi];

        // Take the weighted average of our randomly generated base humidity, the scaled
        // negative altitude, and another random variable (to add some noise) to yield the
        // final humidity.
        const WEIGHTS : [f32; 4] = [3.0, 1.0, 1.0, 1.0];
        // final humidity. Note that we are using the "old" version of chaos here.
        const HUMID_WEIGHTS : [f32; 2] = [1.0, 1.0/*, 0.5*/];
        let humidity = cdf_irwin_hall(
            &WEIGHTS,
            &HUMID_WEIGHTS,
            [humid_base,
             alt_main_pre.mul(0.5).add(0.5),
             alt_base_pre.mul(0.5).add(0.5),
             (gen_ctx.small_nz.get((wposf.div(500.0)).into_array()) as f32)
                 .mul(0.5)
                 .add(0.5)]
        );
        /* // Now we just take a (currently) unweighted average of our randomly generated base humidity
        // (scaled to be from 0 to 1) and our randomly generated "base" humidity. We can
        // adjust this weighting factor as desired.
        let humid_weight = 3.0;
        let humid_alt_weight = 1.0;
        let humidity =
            humid_base.mul(humid_weight)
                .add(humid_alt
                    .mul(humid_alt_weight)
                    // Adds some noise to the humidity effect of altitude to dampen it.
                    .mul((gen_ctx.small_nz.get((wposf.div(500.0)).into_array()) as f32)
                        .mul(0.5)
                        .add(0.5)))
                .div(humid_weight + humid_alt_weight); */
             1.0 - alt_uniform,
             // humid_small
            ]);

        let temp_base =
            gen_ctx.temp_nz.get((wposf.div(12000.0)).into_array()) as f32;
        // We also correlate temperature negatively with altitude, using a different computed factor
        // than the one we use for humidity (and with different weighting). We could definitely make
        // the distribution different for temperature as well.
        let temp_alt_sigma = -0.0625;
        let temp_alt_2s = 3.0f32.sqrt().mul(f32::consts::FRAC_2_PI).mul(temp_alt_sigma);
        let temp_alt_mu = 0.0625;
        // Scaled to [-1, 1] already.
        let temp_alt = humid_alt_pre
            .sub(temp_alt_mu)
            .div(temp_alt_2s)
            .tanh();
        let temp_weight = 2.0;
        let temp_alt_weight = 1.0;
        let temp =
            temp_base.mul(temp_weight)
                .add(temp_alt.mul(temp_alt_weight))
                .div(temp_weight + temp_alt_weight);
        let (temp_base, _) = gen_cdf.temp_base[posi];

        // We also correlate temperature negatively with altitude, using different weighting than we
        // use for humidity.
        const TEMP_WEIGHTS: [f32; 2] = [2.0, 1.0];
        let temp = cdf_irwin_hall(
            &TEMP_WEIGHTS,
            [temp_base,
             1.0 - alt_uniform,
            ])
            // Convert to [-1, 1]
            .sub(0.5)
            .mul(2.0);

        // Now, we finish the computation of chaos incorporating temperature information, producing
        // a value in [0.1, 1.24].
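For reference, the tanh expression in the (now removed) `humid_alt`/`temp_alt` comments is just the logistic CDF rewritten; spelling out the identity makes the `.tanh().mul(0.5).add(0.5)` chain and the `humid_alt_2s` scale factor obvious:

```latex
% Logistic CDF in the tanh form used for humid_alt / temp_alt:
\[
  F(x) \;=\; \frac{1}{1 + e^{-(x-\mu)/s}}
       \;=\; \frac{1}{2} + \frac{1}{2}\tanh\!\left(\frac{x-\mu}{2s}\right),
  \qquad s = \frac{\sqrt{3}}{\pi}\,\sigma .
\]
% A logistic variable with mean \mu and standard deviation \sigma therefore
% saturates smoothly from 0 to 1 around \mu, which is the shaping wanted here;
% note that humid_alt_2s = \sqrt{3} \cdot (2/\pi) \cdot \sigma matches 2s above.
```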
@@ -643,9 +720,9 @@ impl SimChunk {
            .add(0.05)
            .max(0.0)
            .min(1.0)
            .mul(0.4)
            .mul(0.9)
            // Tree density should go (by a lot) with humidity.
            .add(humidity.mul(0.6))
            .powf(1.0 - humidity)
            // No trees in the ocean (currently), no trees in true deserts.
            .mul(if alt > CONFIG.sea_level + 5.0 && humidity > CONFIG.desert_hum {
                1.0
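The interesting part of the tree-density expression is the humidity shaping `add(humidity * 0.6).powf(1.0 - humidity)`: wetter chunks both raise the base and flatten the exponent. A quick hedged sketch with a made-up base value (the real base term depends on the surrounding, elided expression):

```rust
// Illustration of the humidity shaping applied to tree density above.
// `base` stands in for the clamped forest-noise term; its exact value in the
// engine comes from the surrounding (elided) expression.
fn shaped_density(base: f32, humidity: f32) -> f32 {
    (base + humidity * 0.6).powf(1.0 - humidity)
}

fn main() {
    let base = 0.3;
    println!("dry   (hum 0.2): {:.2}", shaped_density(base, 0.2)); // ~0.50
    println!("humid (hum 0.8): {:.2}", shaped_density(base, 0.8)); // ~0.95
}
```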
@@ -654,31 +731,6 @@ impl SimChunk {
            })
            .max(0.0);

        // let humid_normal = InverseGamma::new(4.0, 0.1).unwrap();
        // let humid_normal = LogNormal::new(0.0, 0.1).unwrap();
        let humid_normal = Gamma::new(1.0, 0.5).unwrap();
        // let humid_normal = Gamma::new(0.1, 1.0).unwrap();
        // let humid_normal = Normal::new(0.5, 0.05).unwrap();
        /*if humid_normal.cdf(humid_base as f64) > 0.9 *//* {
            println!("HIGH HUMIDITY: {:?}", humid_base);
        } */
        if pos == Vec2::new(1023, 1023) {
            let mut noise = (0..1024*1024).map( |i| {
                let wposf = Vec2::new(i as f64 / 1024.0, i as f64 % 1024.0);
                gen_ctx.humid_nz.get(wposf.div(1024.0).into_array()) as f32
            } ).collect::<Vec<_>>();
            noise.sort_unstable_by( |f, g| f.partial_cmp(g).unwrap() );
            for (k, f) in noise.iter().enumerate().step_by(1024 * 1024 / 100) {
                println!("{:?}%: {:?}, ", k / (1024 * 1024 / 100), f);
            }
        }
        /* if alt_main_pre.mul(0.5).add(0.5) > 0.7 {
            println!("HIGH: {:?}", alt_main_pre);
        } */
        /* if humidity > CONFIG.jungle_hum {
            println!("JUNGLE");
        } */

        Self {
            chaos,
            alt_base,
@@ -744,7 +796,7 @@ impl SimChunk {
            /* if tree_density > 0.0 {
                println!("Mangroves (forest): {:?}, altitude: {:?}, humidity: {:?}, temperature: {:?}, density: {:?}", wposf, alt, humidity, temp, tree_density);
            } */
            ForestKind::Mangrove
            ForestKind::Oak
        } else if humidity > CONFIG.forest_hum {
            // Moderate climate, moderate humidity.
            ForestKind::Oak