Mirror of https://gitlab.com/veloren/veloren.git (synced 2024-08-30 18:12:32 +00:00)

Merge branch 'xMAC94x/update_toolchain' into 'master'

update toolchain to 2023-09-28

See merge request veloren/veloren!4121

Commit eaeeb0165f
@@ -13,7 +13,7 @@ variables:
   # https://docs.gitlab.com/ee/ci/yaml/#shallow-cloning
   GIT_DEPTH: 3
   GIT_CLEAN_FLAGS: -f
-  CACHE_IMAGE_TAG: d74ceb0a
+  CACHE_IMAGE_TAG: d551c14a
   TAG_REGEX: '/^v[0-9]+\.[0-9]+\.[0-9]+$/'

 default:

Cargo.lock (generated): 46 changes
@@ -2289,7 +2289,7 @@ dependencies = [
 [[package]]
 name = "gfx-auxil"
 version = "0.9.0"
-source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
+source = "git+https://github.com/Imberflur/gfx.git?tag=veloren-fixes-v1#a8ba0a4859abb5f980b02480cb219030fb64530c"
 dependencies = [
  "fxhash",
  "gfx-hal",
@@ -2299,9 +2299,9 @@ dependencies = [
 [[package]]
 name = "gfx-backend-dx11"
 version = "0.8.0"
-source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
+source = "git+https://github.com/Imberflur/gfx.git?tag=veloren-fixes-v1#a8ba0a4859abb5f980b02480cb219030fb64530c"
 dependencies = [
- "arrayvec 0.5.2",
+ "arrayvec 0.7.4",
  "bitflags 1.3.2",
  "gfx-auxil",
  "gfx-hal",
@@ -2320,9 +2320,9 @@ dependencies = [
 [[package]]
 name = "gfx-backend-dx12"
 version = "0.8.0"
-source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
+source = "git+https://github.com/Imberflur/gfx.git?tag=veloren-fixes-v1#a8ba0a4859abb5f980b02480cb219030fb64530c"
 dependencies = [
- "arrayvec 0.5.2",
+ "arrayvec 0.7.4",
  "bit-set",
  "bitflags 1.3.2",
  "d3d12",
@@ -2341,7 +2341,7 @@ dependencies = [
 [[package]]
 name = "gfx-backend-empty"
 version = "0.8.0"
-source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
+source = "git+https://github.com/Imberflur/gfx.git?tag=veloren-fixes-v1#a8ba0a4859abb5f980b02480cb219030fb64530c"
 dependencies = [
  "gfx-hal",
  "log",
@@ -2351,9 +2351,9 @@ dependencies = [
 [[package]]
 name = "gfx-backend-gl"
 version = "0.8.1"
-source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
+source = "git+https://github.com/Imberflur/gfx.git?tag=veloren-fixes-v1#a8ba0a4859abb5f980b02480cb219030fb64530c"
 dependencies = [
- "arrayvec 0.5.2",
+ "arrayvec 0.7.4",
  "bitflags 1.3.2",
  "fxhash",
  "gfx-auxil",
@@ -2374,9 +2374,9 @@ dependencies = [
 [[package]]
 name = "gfx-backend-metal"
 version = "0.8.1"
-source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
+source = "git+https://github.com/Imberflur/gfx.git?tag=veloren-fixes-v1#a8ba0a4859abb5f980b02480cb219030fb64530c"
 dependencies = [
- "arrayvec 0.5.2",
+ "arrayvec 0.7.4",
  "bitflags 1.3.2",
  "block",
  "cocoa-foundation",
@@ -2400,9 +2400,9 @@ dependencies = [
 [[package]]
 name = "gfx-backend-vulkan"
 version = "0.8.0"
-source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
+source = "git+https://github.com/Imberflur/gfx.git?tag=veloren-fixes-v1#a8ba0a4859abb5f980b02480cb219030fb64530c"
 dependencies = [
- "arrayvec 0.5.2",
+ "arrayvec 0.7.4",
  "ash",
  "byteorder",
  "core-graphics-types",
@@ -2422,7 +2422,7 @@ dependencies = [
 [[package]]
 name = "gfx-hal"
 version = "0.8.0"
-source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
+source = "git+https://github.com/Imberflur/gfx.git?tag=veloren-fixes-v1#a8ba0a4859abb5f980b02480cb219030fb64530c"
 dependencies = [
  "bitflags 1.3.2",
  "naga",
@@ -3250,12 +3250,6 @@ dependencies = [
  "windows-sys 0.48.0",
 ]

-[[package]]
-name = "libm"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fc7aa29613bd6a620df431842069224d8bc9011086b1db4c0e0cd47fa03ec9a"
-
 [[package]]
 name = "libm"
 version = "0.2.7"
@@ -4005,7 +3999,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2"
 dependencies = [
  "autocfg",
- "libm 0.2.7",
+ "libm",
 ]

 [[package]]
@@ -4254,13 +4248,13 @@ dependencies = [
 ]

 [[package]]
-name = "packed_simd_2"
-version = "0.3.8"
+name = "packed_simd"
+version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1914cd452d8fccd6f9db48147b29fd4ae05bea9dc5d9ad578509f72415de282"
+checksum = "1f9f08af0c877571712e2e3e686ad79efad9657dbf0f7c3c8ba943ff6c38932d"
 dependencies = [
  "cfg-if 1.0.0",
- "libm 0.1.4",
+ "num-traits",
 ]

 [[package]]
@@ -4835,7 +4829,7 @@ dependencies = [
 [[package]]
 name = "range-alloc"
 version = "0.1.2"
-source = "git+https://github.com/gfx-rs/gfx?rev=27a1dae3796d33d23812f2bb8c7e3b5aea18b521#27a1dae3796d33d23812f2bb8c7e3b5aea18b521"
+source = "git+https://github.com/Imberflur/gfx.git?tag=veloren-fixes-v1#a8ba0a4859abb5f980b02480cb219030fb64530c"

 [[package]]
 name = "raw-window-handle"
@@ -7315,7 +7309,7 @@ dependencies = [
  "num 0.4.1",
  "num-traits",
  "ordered-float 3.9.1",
- "packed_simd_2",
+ "packed_simd",
  "rand 0.8.5",
  "rand_chacha 0.3.1",
  "rayon",
Cargo.toml: 10 changes

@@ -1,6 +1,7 @@
 cargo-features = ["named-profiles","profile-overrides"]

 [workspace]
+resolver = "2"
 members = [
     "common",
     "common/assets",
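Note: `resolver = "2"` opts the whole workspace into Cargo's version-2 feature resolver, which a virtual workspace does not get by default regardless of member editions; among other things it stops unifying features across build-, dev-, and target-specific dependencies. A minimal sketch of the setting in isolation (abbreviated member list, not the full Veloren manifest):

    [workspace]
    resolver = "2"
    members = ["common", "common/assets"]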
@@ -165,6 +166,15 @@ wgpu = { git = "https://github.com/pythonesque/wgpu.git", rev = "179ea209374a928
 # ntapi 3.7 fails to compile under windows due to the bug https://github.com/MSxDOS/ntapi/pull/12
 ntapi = { git = "https://github.com/MSxDOS/ntapi.git", rev = "9f56b149c9e25796739157c0fce3e0007a7de6eb" }

+[patch."https://github.com/gfx-rs/gfx"]
+gfx-hal = { git = "https://github.com/Imberflur/gfx.git", tag = "veloren-fixes-v1" }
+gfx-backend-empty = { git = "https://github.com/Imberflur/gfx.git", tag = "veloren-fixes-v1" }
+gfx-backend-vulkan = { git = "https://github.com/Imberflur/gfx.git", tag = "veloren-fixes-v1" }
+gfx-backend-gl = { git = "https://github.com/Imberflur/gfx.git", tag = "veloren-fixes-v1" }
+gfx-backend-dx12 = { git = "https://github.com/Imberflur/gfx.git", tag = "veloren-fixes-v1" }
+gfx-backend-dx11 = { git = "https://github.com/Imberflur/gfx.git", tag = "veloren-fixes-v1" }
+gfx-backend-metal = { git = "https://github.com/Imberflur/gfx.git", tag = "veloren-fixes-v1" }
+
 # # use the latest fixes in naga (remove when updates trickle down to wgpu-rs)
 # naga = { git = "https://github.com/gfx-rs/naga.git", rev = "3a0f0144112ff621dd7f731bf455adf6cab19164" }
 # # use the latest fixes in gfx (remove when updates trickle down to wgpu-rs)
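Note: the added `[patch."https://github.com/gfx-rs/gfx"]` table tells Cargo to substitute every crate normally fetched from that git URL, including transitive dependencies, with the Imberflur fork, which is why all of the gfx-* sources in Cargo.lock above now point at the `veloren-fixes-v1` tag. The general shape of such an override, as a sketch with placeholder names:

    # Replace crates that come from one upstream git source with a fork.
    [patch."https://github.com/upstream/project"]
    some-crate = { git = "https://github.com/someone/project-fork.git", tag = "fix-tag" }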
@@ -47,6 +47,7 @@ impl Tui {
     (Self { _handle: handle }, commands_r)
 }

+#[allow(clippy::needless_pass_by_ref_mut)]
 pub fn process_command(cmd: &str, command_s: &mut async_channel::Sender<Cmd>) -> bool {
     let matches = Command::new("veloren-botclient")
         .version(common::util::DISPLAY_VERSION_LONG.as_str())
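Note: `clippy::needless_pass_by_ref_mut` is a lint introduced on recent nightlies; it fires when a function takes `&mut` but never mutates through the reference. Several crates in this merge request either `#[allow]` it (as here) or change the signature instead. A minimal illustration of what the lint flags (hypothetical function, not from the codebase):

    // Clippy suggests `&u32` here because the value is only read, never written.
    #[allow(clippy::needless_pass_by_ref_mut)]
    fn peek(counter: &mut u32) -> u32 {
        *counter
    }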
@@ -1809,18 +1809,16 @@ impl Client {
         // significant changes to this code. Here is the approximate order of
         // things. Please update it as this code changes.
         //
-        // 1) Collect input from the frontend, apply input effects to the state
-        // of the game
+        // 1) Collect input from the frontend, apply input effects to the state of the
+        // game
         // 2) Handle messages from the server
-        // 3) Go through any events (timer-driven or otherwise) that need handling
-        // and apply them to the state of the game
-        // 4) Perform a single LocalState tick (i.e: update the world and entities
-        // in the world)
-        // 5) Go through the terrain update queue and apply all changes
-        // to the terrain
+        // 3) Go through any events (timer-driven or otherwise) that need handling and
+        // apply them to the state of the game
+        // 4) Perform a single LocalState tick (i.e: update the world and entities in
+        // the world)
+        // 5) Go through the terrain update queue and apply all changes to the terrain
         // 6) Sync information to the server
-        // 7) Finish the tick, passing actions of the main thread back
-        // to the frontend
+        // 7) Finish the tick, passing actions of the main thread back to the frontend

         // 1) Handle input from frontend.
         // Pass character actions from frontend input to the player's entity.
@@ -749,12 +749,12 @@ pub mod asset_tweak {

         run_with_file(tweak_path, |file| {
             file.write_all(
-                br#"
+                br"
                 ((
                     such: 5,
                     field: 35.752346,
                 ))
-                "#,
+                ",
             )
             .expect("failed to write to the file");

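Note: the `#` guards around the raw byte string are dropped because the literal contains no `"` characters; newer clippy reports such literals via `needless_raw_string_hashes`. A tiny equivalent pair:

    // Both are the same byte string; the second form avoids the lint.
    let a: &[u8] = br#"such: 5"#;
    let b: &[u8] = br"such: 5";
    assert_eq!(a, b);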
@@ -100,11 +100,11 @@ fn input_validated_string(prompt: &str, check: &dyn Fn(&str) -> bool) -> String
 }

 fn main() {
-    let prompt = r#"
+    let prompt = r"
 Stub implementation.
 If you want to migrate new assets, edit `v1` and `v2` modules.
 If you want to migrate old assets, check commit history.
-"#;
+";
     println!("{prompt}");

     let old_dir = input_validated_string(
@@ -76,11 +76,11 @@ pub enum AreaKind {
 }

 lazy_static! {
-    static ref ALIGNMENTS: Vec<String> = vec!["wild", "enemy", "npc", "pet"]
+    static ref ALIGNMENTS: Vec<String> = ["wild", "enemy", "npc", "pet"]
         .iter()
         .map(|s| s.to_string())
         .collect();
-    static ref SKILL_TREES: Vec<String> = vec!["general", "sword", "axe", "hammer", "bow", "staff", "sceptre", "mining"]
+    static ref SKILL_TREES: Vec<String> = ["general", "sword", "axe", "hammer", "bow", "staff", "sceptre", "mining"]
         .iter()
         .map(|s| s.to_string())
         .collect();
@@ -128,14 +128,14 @@ lazy_static! {
         .iter()
         .map(|o| o.to_string().to_string())
         .collect();
-    static ref TIMES: Vec<String> = vec![
+    static ref TIMES: Vec<String> = [
         "midnight", "night", "dawn", "morning", "day", "noon", "dusk"
     ]
     .iter()
     .map(|s| s.to_string())
     .collect();

-    static ref WEATHERS: Vec<String> = vec![
+    static ref WEATHERS: Vec<String> = [
         "clear", "cloudy", "rain", "wind", "storm"
     ]
     .iter()
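Note: the `vec![...]` literals that are only iterated and immediately collected become plain arrays, avoiding a temporary heap allocation; clippy's `useless_vec` lint pushes in this direction. Equivalent sketch:

    // Before: allocates a Vec just to iterate over it.
    // let alignments: Vec<String> = vec!["wild", "enemy"].iter().map(|s| s.to_string()).collect();
    // After: iterate the array directly.
    let alignments: Vec<String> = ["wild", "enemy"].iter().map(|s| s.to_string()).collect();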
@@ -118,17 +118,16 @@ pub fn members<'a>(
 ) -> impl Iterator<Item = (specs::Entity, Role)> + 'a {
     (entities, groups, alignments, uids)
         .join()
-        .filter_map(move |(e, g, a, u)| {
-            (*g == group).then(|| {
+        .filter(move |&(_e, g, _a, _u)| (*g == group))
+        .map(|(e, _g, a, u)| {
             (
                 e,
                 if matches!(a, Alignment::Owned(owner) if owner != u) {
                     Role::Pet
                 } else {
                     Role::Member
                 },
             )
-            })
         })
 }

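Note: splitting the `filter_map` into an explicit `filter` followed by `map` keeps the group check separate from building the `(entity, Role)` pair and drops the `bool::then` closure; the behaviour is unchanged. A small self-contained analogue of the same rewrite:

    let xs = [1, 2, 3, 4];
    // filter_map + bool::then ...
    let doubled_evens: Vec<i32> = xs.iter().filter_map(|&x| (x % 2 == 0).then(|| x * 2)).collect();
    // ... is equivalent to filter followed by map.
    let same: Vec<i32> = xs.iter().filter(|&&x| x % 2 == 0).map(|&x| x * 2).collect();
    assert_eq!(doubled_evens, same);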
@@ -121,16 +121,8 @@ impl Hands {
             Hands::InHands((mainhand, offhand)) => {
                 let mut from_spec = |i: &ItemSpec| i.try_to_item(rng);

-                let mainhand = mainhand
-                    .as_ref()
-                    .map(|i| from_spec(i))
-                    .transpose()?
-                    .flatten();
-                let offhand = offhand
-                    .as_ref()
-                    .map(|i| from_spec(i))
-                    .transpose()?
-                    .flatten();
+                let mainhand = mainhand.as_ref().map(&mut from_spec).transpose()?.flatten();
+                let offhand = offhand.as_ref().map(&mut from_spec).transpose()?.flatten();
                 Ok((mainhand, offhand))
             },
             Hands::Choice(pairs) => {
@@ -649,7 +649,7 @@ impl Inventory {
         self.get(inv_slot)
             .and_then(|item| self.loadout.get_slot_to_equip_into(&item.kind()))
             .map(|equip_slot| self.swap_inventory_loadout(inv_slot, equip_slot, time))
-            .unwrap_or_else(Vec::new)
+            .unwrap_or_default()
     }

     /// Determines how many free inventory slots will be left after equipping an
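Note: `unwrap_or_else(Vec::new)` and `unwrap_or_default()` produce the same empty vector when the option is `None`; the latter is what clippy suggests (`unwrap_or_default`). Sketch:

    fn events_or_empty(v: Option<Vec<u32>>) -> Vec<u32> {
        // Same behaviour as v.unwrap_or_else(Vec::new).
        v.unwrap_or_default()
    }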
@@ -1185,7 +1185,7 @@ mod tests {
         init();
         info!("init");

-        let mut stock: hashbrown::HashMap<Good, f32> = vec![
+        let mut stock: hashbrown::HashMap<Good, f32> = [
             (Good::Ingredients, 50.0),
             (Good::Tools, 10.0),
             (Good::Armor, 10.0),
@@ -517,7 +517,7 @@ impl SkillSet {
             // Perform all mutation inside this branch, to avoid triggering a copy
             // on write or flagged storage in cases where this matters.
             let this_ = to_mut(this_);
-            let mut this = this_.borrow_mut();
+            let this = this_.borrow_mut();
             // NOTE: Verified to exist previously when we accessed
             // this.skill_groups (assuming a non-pathological implementation of
             // ToOwned).
@@ -1,12 +1,12 @@
 use serde::{Deserialize, Serialize};
 use std::{
-    cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd},
-    fmt, hash,
+    cmp::{Eq, Ord, PartialEq, PartialOrd},
+    fmt,
     marker::PhantomData,
 };

 /// Type safe index into Depot
-#[derive(Deserialize, Serialize)]
+#[derive(Deserialize, Serialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct Id<T> {
     idx: u32,
     gen: u32,
@@ -17,26 +17,6 @@ impl<T> Id<T> {
     pub fn id(&self) -> u64 { self.idx as u64 | ((self.gen as u64) << 32) }
 }

-impl<T> Copy for Id<T> {}
-impl<T> Clone for Id<T> {
-    fn clone(&self) -> Self {
-        Self {
-            idx: self.idx,
-            gen: self.gen,
-            phantom: PhantomData,
-        }
-    }
-}
-impl<T> Eq for Id<T> {}
-impl<T> PartialEq for Id<T> {
-    fn eq(&self, other: &Self) -> bool { self.idx == other.idx && self.gen == other.gen }
-}
-impl<T> Ord for Id<T> {
-    fn cmp(&self, other: &Self) -> Ordering { (self.idx, self.gen).cmp(&(other.idx, other.gen)) }
-}
-impl<T> PartialOrd for Id<T> {
-    fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
-}
 impl<T> fmt::Debug for Id<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(
@@ -48,12 +28,6 @@ impl<T> fmt::Debug for Id<T> {
         )
     }
 }
-impl<T> hash::Hash for Id<T> {
-    fn hash<H: hash::Hasher>(&self, h: &mut H) {
-        self.idx.hash(h);
-        self.gen.hash(h);
-    }
-}

 struct Entry<T> {
     gen: u32,
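Note: the hand-written `Copy`/`Clone`/`Eq`/`PartialEq`/`Ord`/`PartialOrd`/`Hash` impls are replaced by derives on the struct definition. One hedged caveat: a derive on a generic struct adds a `T: Trait` bound even though the `PhantomData<T>` field satisfies these traits for every `T`, so the removed manual impls were slightly more permissive; this only matters if `Id<T>` is ever used with a type parameter that is not itself `Clone`/`Ord`/`Hash`. Sketch of the difference:

    use std::marker::PhantomData;

    // Derived: the generated impl is `impl<T: Clone> Clone for Id<T>`, and so on.
    #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
    struct Id<T> {
        idx: u32,
        gen: u32,
        phantom: PhantomData<T>,
    }

    // The removed manual impls carried no bound on T, e.g.:
    // impl<T> Copy for Id<T> {}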
@@ -12,9 +12,9 @@
     type_alias_impl_trait,
     extend_one,
     arbitrary_self_types,
-    int_roundings
+    int_roundings,
+    hash_extract_if
 )]
-#![feature(hash_drain_filter)]

 pub use common_assets as assets;
 pub use uuid;
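Note: the newer nightly renamed the `drain_filter` family to `extract_if`, so the unstable feature gate changes from `hash_drain_filter` to `hash_extract_if` here (and from `drain_filter` to `extract_if` elsewhere in this merge request). A minimal sketch of the renamed HashMap API on this nightly:

    #![feature(hash_extract_if)]
    use std::collections::HashMap;

    fn main() {
        let mut map: HashMap<u32, u32> = (0..8).map(|i| (i, i)).collect();
        // extract_if removes and yields the entries for which the predicate is true.
        let evens: Vec<(u32, u32)> = map.extract_if(|k, _v| k % 2 == 0).collect();
        assert_eq!(evens.len(), 4);
        assert_eq!(map.len(), 4);
    }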
@@ -133,8 +133,12 @@ pub fn distribute_many<T: Copy + Eq + Hash, I>(

     let Some(mut give) = participants
         .iter()
-        .map(|participant| (total_item_amount as f32 * participant.weight / total_weight).ceil() as u32 - participant.recieved_count)
-        .min() else {
+        .map(|participant| {
+            (total_item_amount as f32 * participant.weight / total_weight).ceil() as u32
+                - participant.recieved_count
+        })
+        .min()
+    else {
         tracing::error!("Tried to distribute items to no participants.");
         return;
     };
@@ -152,8 +156,7 @@ pub fn distribute_many<T: Copy + Eq + Hash, I>(

     let participant_count = participants.len();

-    let Some(winner) = participants
-        .get_mut(index) else {
+    let Some(winner) = participants.get_mut(index) else {
         tracing::error!("Tried to distribute items to no participants.");
         return;
     };
@@ -380,8 +380,9 @@ impl Link for VolumeMounting {
             Volume::Terrain => &*terrain_riders,
             Volume::Entity(uid) => {
                 let Some(riders) = entity(uid)
                     .filter(|entity| is_alive(*entity))
-                    .and_then(|entity| volume_riders.get(entity)) else {
+                    .and_then(|entity| volume_riders.get(entity))
+                else {
                     return false;
                 };
                 riders
@@ -16,7 +16,7 @@ impl<T> Id<T> {

 impl<T> Copy for Id<T> {}
 impl<T> Clone for Id<T> {
-    fn clone(&self) -> Self { Self(self.0, PhantomData) }
+    fn clone(&self) -> Self { *self }
 }
 impl<T> Eq for Id<T> {}
 impl<T> PartialEq for Id<T> {
@@ -176,7 +176,7 @@ impl MapSizeLg {
             map_size_lg.y + TERRAIN_CHUNK_BLOCKS_LG < 32;
         // Assertion on dimensions: product of dimensions must fit in a usize.
         let chunks_product_in_range =
-            matches!(1usize.checked_shl(map_size_lg.x + map_size_lg.y), Some(_));
+            1usize.checked_shl(map_size_lg.x + map_size_lg.y).is_some();
         if blocks_in_range && chunks_product_in_range {
             // Cleared all invariants.
             Ok(MapSizeLg(map_size_lg))
@@ -86,10 +86,10 @@ impl<V, S: VolSize, M> Chunk<V, S, M> {
         //
         // Rationales:
         //
-        // 1. We have code in the implementation that assumes it. In particular,
-        // code using `.count_ones()`.
-        // 2. The maximum group size is `256x256x256`, because there's code that
-        // stores group relative indices as `u8`.
+        // 1. We have code in the implementation that assumes it. In particular, code
+        // using `.count_ones()`.
+        // 2. The maximum group size is `256x256x256`, because there's code that stores
+        // group relative indices as `u8`.
         // 3. There's code that stores group indices as `u8`.
         debug_assert!(S::SIZE.x.is_power_of_two());
         debug_assert!(S::SIZE.y.is_power_of_two());
@@ -84,7 +84,9 @@ pub(crate) fn wasi_fd_write(
         let Ok(cio) = iov_addr
             .add_offset(i)
             .and_then(|p| p.read(&memory.view(&store)))
-        else { return Errno::Memviolation as i32; };
+        else {
+            return Errno::Memviolation as i32;
+        };
         if let Err(e) = print_impl(env.data(), &store, cio.buf, cio.buf_len) {
             return e as i32;
         }
@@ -178,7 +178,7 @@ impl<T: Event> PreparedEventQuery<T> {
         Ok(Self {
             bytes: bincode::serialize(&event).map_err(PluginError::Encoding)?,
             function_name: event.get_event_name(),
-            _phantom: PhantomData::default(),
+            _phantom: PhantomData,
         })
     }

@@ -1,4 +1,4 @@
-#![feature(drain_filter, let_chains)]
+#![feature(extract_if, let_chains)]
 #![allow(clippy::option_map_unit_fn)]

 mod aura;
@@ -53,27 +53,35 @@ impl<'a> System<'a> for Sys {
         // For each mount...
         for (entity, is_mount, body) in (&entities, &is_mounts, bodies.maybe()).join() {
             // ...find the rider...
-            let Some((inputs_and_actions, rider)) = id_maps
-                .uid_entity(is_mount.rider)
-                .and_then(|rider| {
-                    controllers
-                        .get_mut(rider)
-                        .map(|c| (
-                            // Only take inputs and actions from the rider if the mount is not intelligent (TODO: expand the definition of 'intelligent').
+            let Some((inputs_and_actions, rider)) =
+                id_maps.uid_entity(is_mount.rider).and_then(|rider| {
+                    controllers.get_mut(rider).map(|c| {
+                        (
+                            // Only take inputs and actions from the rider if the mount is not
+                            // intelligent (TODO: expand the definition of 'intelligent').
                             if !matches!(body, Some(Body::Humanoid(_))) {
-                                let actions = c.actions.drain_filter(|action| match action {
-                                    ControlAction::StartInput { input: i, .. }
-                                    | ControlAction::CancelInput(i) => matches!(i, InputKind::Jump | InputKind::Fly | InputKind::Roll),
-                                    _ => false
-                                }).collect();
+                                let actions = c
+                                    .actions
+                                    .extract_if(|action| match action {
+                                        ControlAction::StartInput { input: i, .. }
+                                        | ControlAction::CancelInput(i) => matches!(
+                                            i,
+                                            InputKind::Jump | InputKind::Fly | InputKind::Roll
+                                        ),
+                                        _ => false,
+                                    })
+                                    .collect();
                                 Some((c.inputs.clone(), actions))
                             } else {
                                 None
                             },
                             rider,
-                        ))
+                        )
+                    })
                 })
-            else { continue };
+            else {
+                continue;
+            };

             // ...apply the mount's position/ori/velocity to the rider...
             let pos = positions.get(entity).copied();
@@ -151,7 +159,7 @@ impl<'a> System<'a> for Sys {
             let inputs = controllers.get_mut(entity).map(|c| {
                 let actions: Vec<_> = c
                     .actions
-                    .drain_filter(|action| match action {
+                    .extract_if(|action| match action {
                         ControlAction::StartInput { input: i, .. }
                         | ControlAction::CancelInput(i) => {
                             matches!(i, InputKind::Jump | InputKind::Fly | InputKind::Roll)
@@ -222,7 +222,7 @@ impl<'a> PhysicsData<'a> {
         }

         // Update PreviousPhysCache
-        for (_, vel, position, ori, mut phys_cache, collider, scale, cs) in (
+        for (_, vel, position, ori, phys_cache, collider, scale, cs) in (
             &self.read.entities,
             &self.write.velocities,
             &self.write.positions,
@@ -797,7 +797,7 @@ impl<'a> PhysicsData<'a> {
                     ori,
                     body,
                     character_state,
-                    mut physics_state,
+                    physics_state,
                     pos_vel_ori_defer,
                     previous_cache,
                     _,
@@ -74,7 +74,7 @@ impl<'a> System<'a> for Sys {
         let mut rng = rand::thread_rng();

         // Attacks
-        'projectile_loop: for (entity, pos, physics, vel, mut projectile) in (
+        'projectile_loop: for (entity, pos, physics, vel, projectile) in (
            &read_data.entities,
            &read_data.positions,
            &read_data.physics_states,
@@ -141,6 +141,7 @@ pub fn create_player(state: &mut State) -> Entity {
         .build()
 }

+#[allow(clippy::needless_pass_by_ref_mut)]
 pub fn generate_chunk(state: &mut State, chunk_pos: Vec2<i32>) {
     let (x, y) = chunk_pos.map(|e| e.to_le_bytes()).into_tuple();
     let mut rng = SmallRng::from_seed([
@@ -1,4 +1,3 @@
-#![feature(drain_filter)]
 //! Network Protocol
 //!
 //! a I/O-Free protocol for the veloren network crate.
@@ -388,7 +388,8 @@ where
         // try to order pending
         let mut pending_violated = false;
         let mut reliable = vec![];
-        self.pending_reliable_buffers.drain_filter(|(_, buffer)| {
+        self.pending_reliable_buffers.retain(|(_, buffer)| {
             // try to get Sid without touching buffer
             let mut testbuffer = buffer.clone();
             match ITFrame::read_frame(&mut testbuffer) {
@@ -398,13 +399,13 @@ where
                     length: _,
                 })) => {
                     reliable.push((sid, buffer.clone()));
-                    true
+                    false
                 },
                 Ok(Some(_)) | Err(_) => {
                     pending_violated = true;
-                    true
+                    false
                 },
-                Ok(None) => false,
+                Ok(None) => true,
             }
         });

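Note: where `drain_filter` was only used to remove matching elements (the drained values were discarded), the code now uses stable `retain`. The predicate's meaning flips: `drain_filter` removed the elements for which the closure returned `true`, while `retain` keeps them, which is why the `true`/`false` returns in this hunk are inverted. Minimal illustration:

    let mut v = vec![1, 2, 3, 4];
    // Old nightly API (removed the evens): v.drain_filter(|x| *x % 2 == 0);
    // Stable equivalent when the drained items are not needed: keep the odds.
    v.retain(|x| *x % 2 != 0);
    assert_eq!(v, vec![1, 3]);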
|
@ -500,7 +500,7 @@ pub enum ProtocolsError {
|
|||||||
}
|
}
|
||||||
|
|
||||||
///////////////////////////////////////
|
///////////////////////////////////////
|
||||||
//// TCP
|
// TCP
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct TcpDrain {
|
pub struct TcpDrain {
|
||||||
half: OwnedWriteHalf,
|
half: OwnedWriteHalf,
|
||||||
@ -546,7 +546,7 @@ impl UnreliableSink for TcpSink {
|
|||||||
}
|
}
|
||||||
|
|
||||||
///////////////////////////////////////
|
///////////////////////////////////////
|
||||||
//// MPSC
|
// MPSC
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct MpscDrain {
|
pub struct MpscDrain {
|
||||||
sender: mpsc::Sender<MpscMsg>,
|
sender: mpsc::Sender<MpscMsg>,
|
||||||
@ -584,7 +584,7 @@ impl UnreliableSink for MpscSink {
|
|||||||
}
|
}
|
||||||
|
|
||||||
///////////////////////////////////////
|
///////////////////////////////////////
|
||||||
//// QUIC
|
// QUIC
|
||||||
#[cfg(feature = "quic")]
|
#[cfg(feature = "quic")]
|
||||||
type QuicStream = (
|
type QuicStream = (
|
||||||
BytesMut,
|
BytesMut,
|
||||||
|
@ -456,7 +456,6 @@ impl BParticipant {
|
|||||||
let retrigger = |cid: Cid, mut p: RecvProtocols, map: &mut HashMap<_, _>| {
|
let retrigger = |cid: Cid, mut p: RecvProtocols, map: &mut HashMap<_, _>| {
|
||||||
let hacky_recv_s = hacky_recv_s.clone();
|
let hacky_recv_s = hacky_recv_s.clone();
|
||||||
let handle = tokio::spawn(async move {
|
let handle = tokio::spawn(async move {
|
||||||
let cid = cid;
|
|
||||||
let r = p.recv().await;
|
let r = p.recv().await;
|
||||||
let _ = hacky_recv_s.send((cid, r, p)); // ignoring failed
|
let _ = hacky_recv_s.send((cid, r, p)); // ignoring failed
|
||||||
});
|
});
|
||||||
@ -862,6 +861,7 @@ mod tests {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::needless_pass_by_ref_mut)]
|
||||||
async fn mock_mpsc(
|
async fn mock_mpsc(
|
||||||
cid: Cid,
|
cid: Cid,
|
||||||
_runtime: &Arc<Runtime>,
|
_runtime: &Arc<Runtime>,
|
||||||
|
@ -27,7 +27,6 @@ impl<T: Eq + Hash> DeferredTracer<T> {
|
|||||||
*self.items.entry(t).or_default() += 1;
|
*self.items.entry(t).or_default() += 1;
|
||||||
self.last = Instant::now();
|
self.last = Instant::now();
|
||||||
self.last_cnt += 1;
|
self.last_cnt += 1;
|
||||||
} else {
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -94,7 +94,9 @@ impl Data {
             .faction
             .and_then(|f| this.factions.get(f))
             .map(|f| f.good_or_evil)
-        else { continue };
+        else {
+            continue;
+        };

         let rand_wpos = |rng: &mut SmallRng, matches_plot: fn(&PlotKind) -> bool| {
             let wpos2d = site2
@@ -257,18 +259,23 @@ impl Data {
         let Some(species) = [
             Some(comp::body::biped_large::Species::Ogre),
             Some(comp::body::biped_large::Species::Cyclops),
-            Some(comp::body::biped_large::Species::Wendigo).filter(|_| biome == BiomeKind::Taiga),
+            Some(comp::body::biped_large::Species::Wendigo)
+                .filter(|_| biome == BiomeKind::Taiga),
             Some(comp::body::biped_large::Species::Cavetroll),
-            Some(comp::body::biped_large::Species::Mountaintroll).filter(|_| biome == BiomeKind::Mountain),
-            Some(comp::body::biped_large::Species::Swamptroll).filter(|_| biome == BiomeKind::Swamp),
+            Some(comp::body::biped_large::Species::Mountaintroll)
+                .filter(|_| biome == BiomeKind::Mountain),
+            Some(comp::body::biped_large::Species::Swamptroll)
+                .filter(|_| biome == BiomeKind::Swamp),
             Some(comp::body::biped_large::Species::Blueoni),
             Some(comp::body::biped_large::Species::Redoni),
-            Some(comp::body::biped_large::Species::Tursus).filter(|_| chunk.temp < CONFIG.snow_temp),
+            Some(comp::body::biped_large::Species::Tursus)
+                .filter(|_| chunk.temp < CONFIG.snow_temp),
         ]
         .into_iter()
         .flatten()
-        .choose(&mut rng)
-        else { continue };
+        .choose(&mut rng) else {
+            continue;
+        };

         this.npcs.create_npc(Npc::new(
             rng.gen(),
@@ -1 +1 @@
-nightly-2023-04-20
+nightly-2023-09-28
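Note: this single-line change of the pinned nightly (2023-04-20 to 2023-09-28) is what drives the rest of the merge request: renamed feature gates, new clippy lints such as `needless_pass_by_ref_mut` and `needless_else`, and newer rustfmt behaviour (for example, formatting `let ... else`). If the pin were expressed as a `rust-toolchain.toml` instead of the bare single-line channel string shown above, it would look like:

    [toolchain]
    channel = "nightly-2023-09-28"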
@@ -1,3 +1,7 @@
+#![allow(
+    clippy::needless_pass_by_ref_mut //until we find a better way for specs
+)]
+
 use clap::Parser;
 use common::comp;
 use server::persistence::SqlLogMode;
@@ -2924,11 +2924,8 @@ impl<'a> AgentData<'a> {
                     {
                         agent.action_state.counters[FCounters::SummonThreshold as usize] -=
                             SUMMON_THRESHOLD;
-                        if !agent.action_state.conditions[Conditions::AttackToggle as usize] {
-                            agent.action_state.conditions[Conditions::AttackToggle as usize] = true;
-                        } else {
-                            agent.action_state.conditions[Conditions::AttackToggle as usize] = false;
-                        }
+                        agent.action_state.conditions[Conditions::AttackToggle as usize] =
+                            !agent.action_state.conditions[Conditions::AttackToggle as usize];
                     }
                 } else {
                     // If target is in melee range use flamecrush
@@ -1,4 +1,7 @@
 #![feature(exclusive_range_pattern, let_chains)]
+#![allow(
+    clippy::needless_pass_by_ref_mut //until we find a better way for specs
+)]

 #[cfg(all(feature = "be-dyn-lib", feature = "use-dyn-lib"))]
 compile_error!("Can't use both \"be-dyn-lib\" and \"use-dyn-lib\" features at once");
@@ -1,15 +1,10 @@
 #![deny(unsafe_code)]
-#![allow(clippy::option_map_unit_fn)]
-#![deny(clippy::clone_on_ref_ptr)]
-#![feature(
-    box_patterns,
-    drain_filter,
-    let_chains,
-    never_type,
-    option_zip,
-    unwrap_infallible
-)]
-#![feature(hash_drain_filter)]
+#![allow(
+    clippy::option_map_unit_fn,
+    clippy::needless_pass_by_ref_mut //until we find a better way for specs
+)]
+#![deny(clippy::clone_on_ref_ptr)]
+#![feature(box_patterns, let_chains, never_type, option_zip, unwrap_infallible)]

 pub mod automod;
 mod character_creator;
@@ -710,22 +705,20 @@ impl Server {
         // significant changes to this code. Here is the approximate order of
         // things. Please update it as this code changes.
         //
-        // 1) Collect input from the frontend, apply input effects to the
-        // state of the game
-        // 2) Go through any events (timer-driven or otherwise) that need handling
-        // and apply them to the state of the game
-        // 3) Go through all incoming client network communications, apply them to
-        // the game state
-        // 4) Perform a single LocalState tick (i.e: update the world and entities
-        // in the world)
-        // 5) Go through the terrain update queue and apply all changes to
-        // the terrain
+        // 1) Collect input from the frontend, apply input effects to the state of the
+        // game
+        // 2) Go through any events (timer-driven or otherwise) that need handling and
+        // apply them to the state of the game
+        // 3) Go through all incoming client network communications, apply them to the
+        // game state
+        // 4) Perform a single LocalState tick (i.e: update the world and entities in
+        // the world)
+        // 5) Go through the terrain update queue and apply all changes to the terrain
         // 6) Send relevant state updates to all clients
         // 7) Check for persistence updates related to character data, and message the
         // relevant entities
         // 8) Update Metrics with current data
-        // 9) Finish the tick, passing control of the main thread back
-        // to the frontend
+        // 9) Finish the tick, passing control of the main thread back to the frontend

         // 1) Build up a list of events for this frame, to be passed to the frontend.
         let mut frontend_events = Vec::new();
@@ -1062,7 +1062,7 @@ pub fn update(
         // The `defer_foreign_keys` pragma treats the foreign key
         // constraints as deferred for the next transaction (it turns itself
         // off at the commit boundary). https://sqlite.org/foreignkeys.html#fk_deferred
-        transaction.pragma_update(None, "defer_foreign_keys", &"ON".to_string())?;
+        transaction.pragma_update(None, "defer_foreign_keys", "ON")?;

         let mut stmt = transaction.prepare_cached(
             "
@@ -267,8 +267,8 @@ impl CharacterUpdater {
     }

     pub fn process_batch_completion(&mut self, completed_batch_id: u64) {
-        self.pending_database_actions.drain_filter(|_, event| {
-            matches!(event, DatabaseAction::Submitted {
+        self.pending_database_actions.retain(|_, event| {
+            !matches!(event, DatabaseAction::Submitted {
                 batch_id,
             } if completed_batch_id == *batch_id)
         });
@@ -875,8 +875,12 @@ impl StateExt for State {
         let mut automod = self.ecs().write_resource::<AutoMod>();
         let client = self.ecs().read_storage::<Client>();
         let player = self.ecs().read_storage::<Player>();
-        let Some(client) = client.get(entity) else { return true };
-        let Some(player) = player.get(entity) else { return true };
+        let Some(client) = client.get(entity) else {
+            return true;
+        };
+        let Some(player) = player.get(entity) else {
+            return true;
+        };

         match automod.validate_chat_msg(
             player.uuid(),
@@ -885,11 +885,7 @@ fn remembers_fight_with(
 //     read_data: &ReadData,
 //     agent: &mut Agent,
 //     target: EcsEntity,
-// ) {
-//     rtsim_entity.is_some().then(|| {
-//         read_data
-//             .stats
-//             .get(target)
-//             .map(|stats| agent.add_fight_to_memory(&stats.name,
+// ) { rtsim_entity.is_some().then(|| { read_data .stats .get(target)
+//     .map(|stats| agent.add_fight_to_memory(&stats.name,
 // read_data.time.0)) });
 // }
@@ -59,7 +59,7 @@ impl<'a> System<'a> for Sys {

         for (pet_entity, owner_pos) in lost_pets.iter() {
             let stay = agn.get(*pet_entity).and_then(|x| x.stay_pos).is_some();
-            if let Some(mut pet_pos) = positions.get_mut(*pet_entity) && !stay{
+            if let Some(pet_pos) = positions.get_mut(*pet_entity) && !stay{
                 // Move the pets to their owner's position
                 // TODO: Create a teleportation event to handle this instead of
                 // processing the entity position move here
@@ -61,8 +61,8 @@ impl<'a> System<'a> for Sys {
         // To update subscriptions
         // 1. Iterate through clients
         // 2. Calculate current chunk position
-        // 3. If chunk is different (use fuzziness) or the client view distance
-        // has changed continue, otherwise return
+        // 3. If chunk is different (use fuzziness) or the client view distance has
+        // changed continue, otherwise return
         // 4. Iterate through subscribed regions
         // 5. Check if region is still in range (use fuzziness)
         // 6. If not in range
@@ -71,7 +71,7 @@ impl<'a> System<'a> for Sys {
         // 7. Determine list of regions that are in range and iterate through it
         // - check if in hashset (hash calc) if not add it
         let mut regions_to_remove = Vec::new();
-        for (mut subscription, pos, presence, client_entity, client) in (
+        for (subscription, pos, presence, client_entity, client) in (
             &mut subscriptions,
             &positions,
             &presences,
@@ -83,11 +83,14 @@ impl<'a> System<'a> for Sys {
             .join()
         {
             let portal_pos = positions.get(teleporting.portal);
-            let Some(Object::Portal { target, requires_no_aggro, .. }) = objects
-                .get(teleporting.portal)
+            let Some(Object::Portal {
+                target,
+                requires_no_aggro,
+                ..
+            }) = objects.get(teleporting.portal)
             else {
                 cancel_teleporting.push(entity);
-                continue
+                continue;
             };

             if portal_pos.map_or(true, |portal_pos| {
@@ -104,7 +104,6 @@ impl Animation for AlphaAnimation {
                 Quaternion::rotation_y(-0.2 + move1 * -0.3) * Quaternion::rotation_z(0.2);
             next.wing_out_r.orientation =
                 Quaternion::rotation_y(0.2 + move1 * 0.3) * Quaternion::rotation_z(-0.2);
-        } else {
         }

         next
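Note: this and the following animation hunks all make the same mechanical change, deleting empty `} else {}` branches; recent clippy flags these as `needless_else`. Tiny before/after:

    fn update(flap: bool) {
        // Before (lints as clippy::needless_else): if flap { work(); } else {}
        if flap {
            work();
        }
    }

    fn work() {}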
@@ -137,7 +137,6 @@ impl Animation for ComboAnimation {
                         * Quaternion::rotation_z(0.2);
                     next.wing_out_r.orientation = Quaternion::rotation_y(0.2 + move1 * 0.3)
                         * Quaternion::rotation_z(-0.2);
-                } else {
                 }
             },
             _ => {},
@@ -88,7 +88,6 @@ impl Animation for ShockwaveAnimation {
             next.foot_l.orientation = Quaternion::rotation_x(0.0);
             next.foot_r.position = Vec3::new(s_a.foot.0, s_a.foot.1, s_a.foot.2);
             next.foot_r.orientation = Quaternion::rotation_x(0.0);
-        } else {
         }

         next
@@ -79,7 +79,6 @@ impl Animation for ShootAnimation {

             next.foot_l.orientation = Quaternion::rotation_x(movement1abs * 0.3);
             next.foot_r.orientation = Quaternion::rotation_x(movement1abs * 0.3);
-        } else {
         }
         if velocity.xy().magnitude() < 1.0 {
             next.wing_in_l.orientation = Quaternion::rotation_y(-1.0 + movement1abs * 0.8)
@@ -103,7 +103,6 @@ impl Animation for SummonAnimation {
             next.tail_rear.position = Vec3::new(0.0, s_a.tail_rear.0, s_a.tail_rear.1);
             next.tail_rear.orientation =
                 Quaternion::rotation_x(-movement1abs * 0.1 + movement2abs * 0.1 + twitch2 * 0.02);
-        } else {
         }

         next
@@ -83,7 +83,6 @@ impl Animation for AlphaAnimation {
                 Quaternion::rotation_y(-0.2 + move1 * -0.3) * Quaternion::rotation_z(0.2);
             next.wing_out_r.orientation =
                 Quaternion::rotation_y(0.2 + move1 * 0.3) * Quaternion::rotation_z(-0.2);
-        } else {
         }

         next
@@ -68,7 +68,6 @@ impl Animation for ShockwaveAnimation {
             next.leg_l.orientation = Quaternion::rotation_x(0.0);
             next.leg_r.position = Vec3::new(s_a.leg.0, s_a.leg.1, s_a.leg.2);
             next.leg_r.orientation = Quaternion::rotation_x(0.0);
-        } else {
         }

         next
@@ -66,7 +66,6 @@ impl Animation for ShootAnimation {

             next.leg_l.orientation = Quaternion::rotation_x(movement1abs * -0.5);
             next.leg_r.orientation = Quaternion::rotation_x(movement1abs * -0.5);
-        } else {
         }
         if velocity.xy().magnitude() < 1.0 {
             next.wing_in_l.orientation = Quaternion::rotation_y(-1.0 + movement1abs * 0.8)
@@ -84,7 +84,6 @@ impl Animation for SummonAnimation {
             next.tail.position = Vec3::new(0.0, s_a.tail.0, s_a.tail.1);
             next.tail.orientation =
                 Quaternion::rotation_x(-movement1abs * 0.1 + movement2abs * 0.1 + twitch2 * 0.02);
-        } else {
         }

         next
@@ -105,7 +105,6 @@ impl Animation for BeamAnimation {
                     Quaternion::rotation_x(move1 * 0.1) * Quaternion::rotation_z(move1 * -0.1);
                 next.shorts.orientation =
                     Quaternion::rotation_x(move1 * 0.2) * Quaternion::rotation_z(move1 * -0.2);
-                } else {
                 };
             },
             _ => {},
@@ -84,7 +84,6 @@ impl Animation for RepeaterAnimation {
                 * Quaternion::rotation_z(move1 * -0.6 + move3 * 0.8);
             next.chest.position = Vec3::new(0.0, s_a.chest.0, s_a.chest.1);
             next.chest.orientation = Quaternion::rotation_x(0.0);
-        } else {
         };
         next.shorts.position = Vec3::new(0.0, s_a.shorts.0 + move1 * 2.0, s_a.shorts.1);
         next.shorts.orientation = Quaternion::rotation_x(move1 * 0.2 + move3 * 0.2);
@@ -218,7 +218,6 @@ impl Animation for SneakWieldAnimation {
             next.shorts.position = Vec3::new(0.0, 1.0 + s_a.shorts.0, s_a.shorts.1);
             next.shorts.orientation =
                 Quaternion::rotation_x(0.15) * Quaternion::rotation_z(0.25);
-        } else {
         }
         next.hand_l.position = Vec3::new(s_a.ahl.0, s_a.ahl.1, s_a.ahl.2);
         next.hand_l.orientation =
@@ -208,7 +208,6 @@ impl Animation for WieldAnimation {
             next.shorts.position = Vec3::new(0.0, 1.0 + s_a.shorts.0, s_a.shorts.1);
             next.shorts.orientation =
                 Quaternion::rotation_x(0.15) * Quaternion::rotation_z(0.25);
-        } else {
         }
         next.hand_l.position = Vec3::new(s_a.ahl.0, s_a.ahl.1, s_a.ahl.2);
         next.hand_l.orientation =
@@ -73,7 +73,6 @@ impl Animation for ShockwaveAnimation {
             next.foot_l.position = Vec3::new(-s_a.foot.0, s_a.foot.1, s_a.foot.2 + move2);

             next.foot_r.position = Vec3::new(s_a.foot.0, s_a.foot.1, s_a.foot.2 + move2);
-        } else {
         }
         next
     }
@@ -66,7 +66,6 @@ impl Animation for BreatheAnimation {
             next.foot_bl.orientation = Quaternion::rotation_y(twitch2 * 0.02);

             next.foot_br.orientation = Quaternion::rotation_y(twitch2 * 0.02);
-        } else {
         };
         next
     }
@@ -71,7 +71,6 @@ impl Animation for ShootAnimation {
                 next.foot_bl.position = Vec3::new(-s_a.feet_b.0, s_a.feet_b.1, s_a.feet_b.2);

                 next.foot_br.position = Vec3::new(s_a.feet_b.0, s_a.feet_b.1, s_a.feet_b.2);
-                } else {
                 };
             },
             Some("common.abilities.custom.dagon.dagonbombs") => {
@@ -117,7 +116,6 @@ impl Animation for ShootAnimation {
                 next.foot_bl.position = Vec3::new(-s_a.feet_b.0, s_a.feet_b.1, s_a.feet_b.2);

                 next.foot_br.position = Vec3::new(s_a.feet_b.0, s_a.feet_b.1, s_a.feet_b.2);
-                } else {
                 };
             },
             _ => {
@@ -150,7 +148,6 @@ impl Animation for ShootAnimation {
                 next.foot_bl.position = Vec3::new(-s_a.feet_b.0, s_a.feet_b.1, s_a.feet_b.2);

                 next.foot_br.position = Vec3::new(s_a.feet_b.0, s_a.feet_b.1, s_a.feet_b.2);
-                } else {
                 };
             },
         }
@@ -1,4 +1,7 @@
#![feature(stmt_expr_attributes)]
+#![allow(
+clippy::needless_pass_by_ref_mut //until we find a better way for specs
+)]

#[cfg(all(feature = "be-dyn-lib", feature = "use-dyn-lib"))]
compile_error!("Can't use both \"be-dyn-lib\" and \"use-dyn-lib\" features at once");
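The new toolchain ships the clippy::needless_pass_by_ref_mut lint, which several crates in this commit allow at the crate root. A minimal, illustrative reproduction of the pattern being silenced (not Veloren code): a &mut parameter that is only ever read, which specs' system signatures frequently force.

// Hypothetical example of what `clippy::needless_pass_by_ref_mut` flags:
// `counter` is taken by `&mut` but never mutated.
fn read_only(counter: &mut u32) -> u32 {
    *counter
}

fn main() {
    let mut c = 5;
    println!("{}", read_only(&mut c));
}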
@@ -407,7 +407,7 @@ impl TabComplete for ArgumentSpec {
.filter(|string| string.starts_with(part))
.map(|c| c.to_string())
.collect(),
-ArgumentSpec::Boolean(_, part, _) => vec!["true", "false"]
+ArgumentSpec::Boolean(_, part, _) => ["true", "false"]
.iter()
.filter(|string| string.starts_with(part))
.map(|c| c.to_string())
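The change above swaps vec!["true", "false"] for a plain array, which avoids a heap allocation on every completion query. A small stand-alone sketch of the same pattern; the complete function is invented for illustration.

// Illustrative only; `complete` is not a Veloren function.
fn complete(part: &str) -> Vec<String> {
    ["true", "false"]
        .iter()
        .filter(|s| s.starts_with(part))
        .map(|s| s.to_string())
        .collect()
}

fn main() {
    assert_eq!(complete("t"), vec!["true".to_string()]);
}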
@@ -68,7 +68,7 @@ impl<'a> System<'a> for Sys {
.as_mut()
.map(|t| *t += dt.0);

-for mut floater in hp_floater_list.floaters.iter_mut() {
+for floater in hp_floater_list.floaters.iter_mut() {
// Increment timer
floater.timer += dt.0;
floater.jump_timer += dt.0;

@@ -196,7 +196,7 @@ impl<'a> InventoryScroller<'a> {
.set(self.bg_ids.bg_frame, ui);
}

-fn title(&mut self, state: &mut ConrodState<'_, InventoryScrollerState>, ui: &mut UiCell<'_>) {
+fn title(&mut self, state: &ConrodState<'_, InventoryScrollerState>, ui: &mut UiCell<'_>) {
Text::new(
&self
.localized_strings

@@ -371,7 +371,7 @@ impl<'a> InventoryScroller<'a> {
});
}
for (pos, item) in items.into_iter() {
-if self.details_mode && !self.is_us && matches!(item, None) {
+if self.details_mode && !self.is_us && item.is_none() {
continue;
}
let (x, y) = if self.details_mode {

@@ -488,7 +488,7 @@ impl<'a> InventoryScroller<'a> {

fn footer_metrics(
&mut self,
-state: &mut ConrodState<'_, InventoryScrollerState>,
+state: &ConrodState<'_, InventoryScrollerState>,
ui: &mut UiCell<'_>,
) {
let space_used = self.inventory.populated_slots();
@@ -874,13 +874,13 @@ mod tests {
#[test]
fn parse_cmds() {
let expected: Result<(String, Vec<String>), String> = Ok(("help".to_string(), vec![]));
-assert_eq!(parse_cmd(r#"help"#), expected);
+assert_eq!(parse_cmd(r"help"), expected);

let expected: Result<(String, Vec<String>), String> = Ok(("say".to_string(), vec![
"foo".to_string(),
"bar".to_string(),
]));
-assert_eq!(parse_cmd(r#"say foo bar"#), expected);
+assert_eq!(parse_cmd(r"say foo bar"), expected);
assert_eq!(parse_cmd(r#"say "foo" "bar""#), expected);

let expected: Result<(String, Vec<String>), String> =
@@ -1479,7 +1479,7 @@ impl<'a> Widget for Crafting<'a> {
});
self.inventory
.slots_with_id()
-.filter(|(_, item)| item.as_ref().map_or(false, |i| can_repair(i)))
+.filter(|(_, item)| item.as_ref().map_or(false, can_repair))
.for_each(|(slot, _)| {
events.push(Event::RepairItem {
slot: Slot::Inventory(slot),

@@ -1487,9 +1487,7 @@ impl<'a> Widget for Crafting<'a> {
});
}

-let can_perform = repair_slot
-.item(self.inventory)
-.map_or(false, |item| can_repair(item));
+let can_perform = repair_slot.item(self.inventory).map_or(false, can_repair);

(repair_slot.slot, None, can_perform)
},
@@ -515,31 +515,27 @@ impl BuffIconKind {
}

impl PartialOrd for BuffIconKind {
-fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
+}
+
+impl Ord for BuffIconKind {
+fn cmp(&self, other: &Self) -> Ordering {
match (self, other) {
(
BuffIconKind::Buff { kind, .. },
BuffIconKind::Buff {
kind: other_kind, ..
},
-) => Some(kind.cmp(other_kind)),
+) => kind.cmp(other_kind),
-(BuffIconKind::Buff { .. }, BuffIconKind::Stance(_)) => Some(Ordering::Greater),
+(BuffIconKind::Buff { .. }, BuffIconKind::Stance(_)) => Ordering::Greater,
-(BuffIconKind::Stance(_), BuffIconKind::Buff { .. }) => Some(Ordering::Less),
+(BuffIconKind::Stance(_), BuffIconKind::Buff { .. }) => Ordering::Less,
(BuffIconKind::Stance(stance), BuffIconKind::Stance(stance_other)) => {
-Some(stance.cmp(stance_other))
+stance.cmp(stance_other)
},
}
}
}

-impl Ord for BuffIconKind {
-fn cmp(&self, other: &Self) -> Ordering {
-// We know this is safe since we can look at the partialord implementation and
-// see that every variant is wrapped in Some
-self.partial_cmp(other).unwrap()
-}
-}
-
impl PartialEq for BuffIconKind {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
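The refactor above inverts the usual relationship: Ord now holds the real comparison and partial_cmp simply wraps it in Some, so the old "we know this is safe" unwrap disappears. A minimal sketch of the same idiom with an invented two-variant enum (not Veloren's types):

use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
enum Icon {
    Buff(u8),
    Stance(u8),
}

impl Ord for Icon {
    fn cmp(&self, other: &Self) -> Ordering {
        match (self, other) {
            (Icon::Buff(a), Icon::Buff(b)) => a.cmp(b),
            (Icon::Buff(_), Icon::Stance(_)) => Ordering::Greater,
            (Icon::Stance(_), Icon::Buff(_)) => Ordering::Less,
            (Icon::Stance(a), Icon::Stance(b)) => a.cmp(b),
        }
    }
}

// `partial_cmp` is now total by construction, so no `unwrap` is needed anywhere.
impl PartialOrd for Icon {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
}

fn main() {
    assert!(Icon::Buff(1) > Icon::Stance(9));
}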
@@ -586,7 +582,7 @@ impl BuffIcon {
buffs
.iter_active()
.filter_map(BuffIcon::from_buffs)
-.chain(stance.and_then(BuffIcon::from_stance).into_iter())
+.chain(stance.and_then(BuffIcon::from_stance))
.collect::<Vec<_>>()
}

@@ -223,7 +223,7 @@ impl<'a> Widget for Quest<'a> {
// [amount, item_desc]

//("common.items.weapons.sword.caladbolg");
-let rewards = vec![
+let rewards = [
(1, "common.items.weapons.dagger.starter_dagger", "Dagger"),
(4, "common.items.crafting_ing.seashells", "Seashell"),
(

@@ -1,17 +1,22 @@
#![deny(unsafe_code)]
#![allow(incomplete_features)]
-#![allow(clippy::identity_op, clippy::option_map_unit_fn)]
+#![allow(
+clippy::identity_op,
+clippy::option_map_unit_fn,
+clippy::needless_pass_by_ref_mut //until we find a better way for specs
+)]
#![deny(clippy::clone_on_ref_ptr)]
#![feature(
array_methods,
-array_zip,
-drain_filter,
+extract_if,
trait_alias,
option_get_or_insert_default,
map_try_insert,
slice_as_chunks,
let_chains,
-generic_const_exprs
+generic_const_exprs,
+maybe_uninit_uninit_array,
+maybe_uninit_array_assume_init
)]
#![recursion_limit = "2048"]

@@ -314,7 +314,7 @@ impl PlayState for MainMenuState {
password,
server_address,
} => {
-let mut net_settings = &mut global_state.settings.networking;
+let net_settings = &mut global_state.settings.networking;
let use_quic = net_settings.use_quic;
net_settings.username = username.clone();
net_settings.default_server = server_address.clone();
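Several hunks in this commit (here, in the settings code, and in the worldgen code further down) drop a mut from bindings like let mut net_settings = &mut ...: the binding itself is never reassigned, and mutating through a &mut reference does not require a mutable binding. A tiny illustration with made-up names:

fn main() {
    let mut settings = (1u32, 2u32);
    // `let mut view` would earn an unused_mut warning; the reference alone is enough.
    let view = &mut settings.0;
    *view += 10;
    assert_eq!(settings.0, 11);
}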
@@ -529,8 +529,8 @@ pub fn generate_mesh<'a>(
(
opaque_deep
.into_iter()
-.chain(opaque_shallow.into_iter())
+.chain(opaque_shallow)
-.chain(opaque_surface.into_iter())
+.chain(opaque_surface)
.collect(),
fluid_mesh,
Mesh::new(),
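Iterator::chain accepts any IntoIterator, so the explicit .into_iter() on the chained collections is redundant; that is the simplification applied here and in the analogous mesh_worker and Land hunks below. A stand-alone sketch with invented values:

fn main() {
    let deep = vec![1, 2];
    let shallow = vec![3];
    let surface = vec![4, 5];
    // `.chain(shallow)` is equivalent to `.chain(shallow.into_iter())`.
    let all: Vec<i32> = deep.into_iter().chain(shallow).chain(surface).collect();
    assert_eq!(all, vec![1, 2, 3, 4, 5]);
}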
@@ -39,7 +39,11 @@ impl Add<Vertex> for Vertex {

fn add(self, other: Self) -> Self::Output {
Self {
-pos: self.pos.zip(other.pos).map(|(a, b)| a + b),
+pos: [
+self.pos[0] + other.pos[0],
+self.pos[1] + other.pos[1],
+self.pos[2] + other.pos[2],
+],
}
}
}
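The unstable [T; N]::zip used by the old self.pos.zip(other.pos).map(...) was removed from the standard library, hence the explicit per-component sum. A self-contained version of the new shape of the code; the struct here is a guess at the surrounding vertex type, not the real one.

struct Vertex {
    pos: [f32; 3],
}

impl std::ops::Add for Vertex {
    type Output = Self;

    // Sum the position component-wise, replacing the removed array `zip`.
    fn add(self, other: Self) -> Self {
        Self {
            pos: [
                self.pos[0] + other.pos[0],
                self.pos[1] + other.pos[1],
                self.pos[2] + other.pos[2],
            ],
        }
    }
}

fn main() {
    let a = Vertex { pos: [1.0, 2.0, 3.0] };
    let b = Vertex { pos: [0.5, 0.5, 0.5] };
    assert_eq!((a + b).pos, [1.5, 2.5, 3.5]);
}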
@@ -1484,27 +1484,13 @@ impl Renderer {
// _bones: &Consts<figure::BoneData>,
// _lod: &lod_terrain::LodData,
// _locals: &Consts<shadow::Locals>,
-// ) {
-// // FIXME: Consider reenabling at some point.
-// /* let (point_shadow_maps, directed_shadow_maps) =
-// if let Some(shadow_map) = &mut self.shadow_map {
-// (
-// (
-// shadow_map.point_res.clone(),
-// shadow_map.point_sampler.clone(),
-// ),
-// (
-// shadow_map.directed_res.clone(),
-// shadow_map.directed_sampler.clone(),
-// ),
-// )
-// } else {
-// (
-// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
-// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
-// )
-// };
-// let model = &model.opaque;
+// ) { // FIXME: Consider reenabling at some point. /* let (point_shadow_maps,
+// directed_shadow_maps) = if let Some(shadow_map) = &mut self.shadow_map { (
+// ( shadow_map.point_res.clone(), shadow_map.point_sampler.clone(), ), (
+// shadow_map.directed_res.clone(), shadow_map.directed_sampler.clone(), ), )
+// } else { ( (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()),
+// (self.noise_tex.srv.clone(), self.noise_tex.sampler.clone()), ) }; let
+// model = &model.opaque;

// self.encoder.draw(
// &gfx::Slice {

@@ -445,7 +445,9 @@ impl<'frame> Drawer<'frame> {
/// pending uploads.
fn run_ui_premultiply_passes(&mut self) {
prof_span!("run_ui_premultiply_passes");
-let Some(premultiply_alpha) = self.borrow.pipelines.premultiply_alpha() else { return };
+let Some(premultiply_alpha) = self.borrow.pipelines.premultiply_alpha() else {
+return;
+};
let encoder = self.encoder.as_mut().unwrap();
let device = self.borrow.device;

@@ -22,6 +22,11 @@ pub struct Locals {
pub postprocess_bind: postprocess::BindGroup,
}

+fn arr_zip_map<const N: usize, A, B, C>(a: [A; N], b: [B; N], f: impl Fn(A, B) -> C) -> [C; N] {
+let mut b = b.into_iter();
+a.map(|a| f(a, b.next().unwrap()))
+}
+
impl Locals {
pub(super) fn new(
device: &wgpu::Device,

@@ -58,10 +63,9 @@ impl Locals {
);

let bloom_binds = bloom.map(|bloom| {
-bloom
-.src_views
-.zip(bloom.locals) // zip arrays
-.map(|(view, locals)| layouts.bloom.bind(device, view, sampler, locals))
+arr_zip_map(bloom.src_views, bloom.locals, |view, locals| {
+layouts.bloom.bind(device, view, sampler, locals)
+})
});

Self {

@@ -107,10 +111,9 @@ impl Locals {
&self.postprocess,
);
self.bloom_binds = bloom.map(|bloom| {
-bloom
-.src_views
-.zip(bloom.locals) // zip arrays
-.map(|(view, locals)| layouts.bloom.bind(device, view, sampler, locals))
+arr_zip_map(bloom.src_views, bloom.locals, |view, locals| {
+layouts.bloom.bind(device, view, sampler, locals)
+})
});
}
}
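The arr_zip_map helper introduced above stands in for the removed array_zip nightly feature: it zips two fixed-size arrays by value and maps the pairs. A quick check of the helper in isolation; the test values are invented.

fn arr_zip_map<const N: usize, A, B, C>(a: [A; N], b: [B; N], f: impl Fn(A, B) -> C) -> [C; N] {
    let mut b = b.into_iter();
    // Both arrays have length N, so `unwrap` cannot fail here.
    a.map(|a| f(a, b.next().unwrap()))
}

fn main() {
    let sums = arr_zip_map([1, 2, 3], [10, 20, 30], |x, y| x + y);
    assert_eq!(sums, [11, 22, 33]);
}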
@@ -1069,7 +1069,7 @@ fn mesh_hold() -> BoneMeshes {
)
}

-/////////
+//////
#[derive(Deserialize)]
struct QuadrupedSmallCentralSpec(HashMap<(QSSpecies, QSBodyType), SidedQSCentralVoxSpec>);

@@ -1660,7 +1660,7 @@ impl QuadrupedMediumLateralSpec {
}
}

-////
+//////
#[derive(Deserialize)]
struct BirdMediumCentralSpec(HashMap<(BMSpecies, BMBodyType), SidedBMCentralVoxSpec>);

@@ -1914,7 +1914,7 @@ impl BirdMediumLateralSpec {
}
}

-////
+//////
#[derive(Deserialize)]
struct TheropodCentralSpec(HashMap<(TSpecies, TBodyType), SidedTCentralVoxSpec>);

@@ -2244,7 +2244,7 @@ impl TheropodLateralSpec {
}
}

-////
+//////
#[derive(Deserialize)]
struct ArthropodCentralSpec(HashMap<(ASpecies, ABodyType), SidedACentralVoxSpec>);

@@ -2644,7 +2644,7 @@ impl ArthropodLateralSpec {
(lateral, Vec3::from(spec.leg_br.offset))
}
}
-////
+//////
#[derive(Deserialize)]
struct FishMediumCentralSpec(HashMap<(FMSpecies, FMBodyType), SidedFMCentralVoxSpec>);

@@ -2850,7 +2850,7 @@ impl FishMediumLateralSpec {
}
}

-////
+//////
#[derive(Deserialize)]
struct FishSmallCentralSpec(HashMap<(FSSpecies, FSBodyType), SidedFSCentralVoxSpec>);

@@ -2994,7 +2994,7 @@ impl FishSmallLateralSpec {
}
}

-////
+//////

#[derive(Deserialize)]
struct BipedSmallWeaponSpec(HashMap<ToolKey, ArmorVoxSpec>);

@@ -3269,8 +3269,8 @@ impl BipedSmallWeaponSpec {
(tool_kind_segment, offset)
}
}
-////

+//////
#[derive(Deserialize)]
struct DragonCentralSpec(HashMap<(DSpecies, DBodyType), SidedDCentralVoxSpec>);

@@ -3641,7 +3641,7 @@ impl DragonLateralSpec {
}
}

-////
+//////
#[derive(Deserialize)]
struct BirdLargeCentralSpec(HashMap<(BLASpecies, BLABodyType), SidedBLACentralVoxSpec>);

@@ -4044,7 +4044,7 @@ impl BirdLargeLateralSpec {
}
}

-////
+//////
#[derive(Deserialize)]
struct BipedLargeCentralSpec(HashMap<(BLSpecies, BLBodyType), SidedBLCentralVoxSpec>);

@@ -4462,7 +4462,8 @@ impl BipedLargeSecondSpec {
(tool_kind_segment, offset)
}
}
-////
+//////
#[derive(Deserialize)]
struct GolemCentralSpec(HashMap<(GSpecies, GBodyType), SidedGCentralVoxSpec>);

@@ -4772,8 +4773,7 @@ impl GolemLateralSpec {
}
}

-/////
+//////

#[derive(Deserialize)]
struct QuadrupedLowCentralSpec(HashMap<(QLSpecies, QLBodyType), SidedQLCentralVoxSpec>);

@@ -5050,8 +5050,7 @@ impl QuadrupedLowLateralSpec {
}
}

-////
+//////

#[derive(Deserialize)]
struct ObjectCentralSpec(HashMap<object::Body, SidedObjectCentralVoxSpec>);

@@ -648,7 +648,7 @@ impl FigureMgr {
}
let dt = ecs.fetch::<DeltaTime>().0;
let updater = ecs.read_resource::<LazyUpdate>();
-for (entity, light_emitter_opt, interpolated, pos, body, mut light_anim) in (
+for (entity, light_emitter_opt, interpolated, pos, body, light_anim) in (
&ecs.entities(),
ecs.read_storage::<LightEmitter>().maybe(),
ecs.read_storage::<Interpolated>().maybe(),

@@ -1101,9 +1101,9 @@ impl FigureMgr {
let holding_lantern = inventory
.map_or(false, |i| i.equipped(EquipSlot::Lantern).is_some())
&& light_emitter.is_some()
-&& !((matches!(second_tool_hand, Some(_))
+&& !(second_tool_hand.is_some()
-|| matches!(active_tool_hand, Some(Hands::Two)))
+|| matches!(active_tool_hand, Some(Hands::Two))
&& character.map_or(false, |c| c.is_wield()))
&& !character.map_or(false, |c| c.is_using_hands())
&& physics.in_liquid().is_none();

@@ -250,7 +250,7 @@ pub fn clip_object_by_plane<T: Float + MulAdd<T, T, Output = T> + core::fmt::Deb
tolerance: T,
) {
let mut intersection_points = Vec::new();
-polys.drain_filter(|points| {
+polys.retain_mut(|points| {
let len = intersection_points.len();
let outside_first = clip_points_by_plane(points, plane, &mut intersection_points);
// Only remember intersections that are not coplanar with this side; i.e. those

@@ -273,7 +273,7 @@ pub fn clip_object_by_plane<T: Float + MulAdd<T, T, Output = T> + core::fmt::Deb
intersection_points.swap(len, len + 1);
}
// Remove polygon if it was clipped away
-points.is_empty()
+!points.is_empty()
});
// Add a polygon of all intersection points with the plane to close out the
// object.
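Note the inverted predicate that comes with the drain_filter to retain_mut move: drain_filter removed the elements its closure returned true for, while retain_mut keeps them, so points.is_empty() becomes !points.is_empty() here (and el.timeout <= 0.0 becomes el.timeout > 0.0 in the scene code below). A minimal stand-alone illustration with invented data:

fn main() {
    let mut polys: Vec<Vec<u32>> = vec![vec![1, 2, 3], vec![]];
    // Keep non-empty polygons; the old drain_filter closure returned `is_empty()`.
    polys.retain_mut(|points| !points.is_empty());
    assert_eq!(polys, vec![vec![1, 2, 3]]);
}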
@@ -763,9 +763,9 @@ impl Scene {
renderer.update_consts(&mut self.data.lights, lights);

// Update event lights
-self.event_lights.drain_filter(|el| {
+self.event_lights.retain_mut(|el| {
el.timeout -= dt;
-el.timeout <= 0.0
+el.timeout > 0.0
});

// Update shadow constants

@@ -137,7 +137,7 @@ impl Scene {
figure_state: None,

backdrop: backdrop.map(|specifier| {
-let mut state = FigureState::new(renderer, FixtureSkeleton::default(), ());
+let mut state = FigureState::new(renderer, FixtureSkeleton, ());
let mut greedy = FigureModel::make_greedy();
let mut opaque_mesh = Mesh::new();
let (segment, offset) = load_mesh(specifier, Vec3::new(-55.0, -49.5, -2.0));

@@ -437,8 +437,8 @@ fn mesh_worker(
(
deep_level
.into_iter()
-.chain(shallow_level.into_iter())
+.chain(shallow_level)
-.chain(surface_level.into_iter())
+.chain(surface_level)
.collect(),
alt_indices,
)
@@ -59,7 +59,7 @@ impl Interactable {
volume_pos: VolumePos,
interaction: Interaction,
) -> Option<Self> {
-let Some(block) = volume_pos.get_block(terrain, id_maps, colliders) else { return None };
+let block = volume_pos.get_block(terrain, id_maps, colliders)?;
let block_interaction = match interaction {
Interaction::Collect => {
// Check if this is an unlockable sprite
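Since the surrounding function returns Option<Self>, the let ... else { return None } above collapses into the ? operator. A small sketch of the same transformation with an invented helper:

// Illustrative only; `first_even` is not part of the codebase.
fn first_even(xs: &[i32]) -> Option<i32> {
    // Equivalent to: let Some(x) = xs.iter().copied().find(...) else { return None };
    let x = xs.iter().copied().find(|x| x % 2 == 0)?;
    Some(x * 10)
}

fn main() {
    assert_eq!(first_even(&[1, 3, 4]), Some(40));
    assert_eq!(first_even(&[1, 3]), None);
}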
@@ -114,11 +114,11 @@ impl Interactable {
/// interact with if the interact key is pressed
/// Selected in the following order:
/// 1) Targeted items, in order of nearest under cursor:
-/// (a) entity (if within range)
+/// a) entity (if within range)
-/// (b) collectable
+/// b) collectable
-/// (c) can be mined, and is a mine sprite (Air) not a weak rock.
+/// c) can be mined, and is a mine sprite (Air) not a weak rock.
/// 2) outside of targeted cam ray
/// -> closest of nearest interactable entity/block
pub(super) fn select_interactable(
client: &Client,
collect_target: Option<Target<target::Collectable>>,

@@ -216,7 +216,7 @@ settings_change_from!(Accessibility);

impl SettingsChange {
pub fn process(self, global_state: &mut GlobalState, session_state: &mut SessionState) {
-let mut settings = &mut global_state.settings;
+let settings = &mut global_state.settings;

match self {
SettingsChange::Audio(audio_change) => {

@@ -366,7 +366,7 @@ impl SettingsChange {
},
SettingsChange::Gamepad(gamepad_change) => match gamepad_change {},
SettingsChange::Gameplay(gameplay_change) => {
-let mut window = &mut global_state.window;
+let window = &mut global_state.window;
match gameplay_change {
Gameplay::AdjustMousePan(sensitivity) => {
window.pan_sensitivity = sensitivity;

@@ -36,7 +36,7 @@ fn load_map(path: &Path) -> Option<SingleplayerWorld> {
let meta_path = path.join("meta.ron");

let Ok(f) = fs::File::open(&meta_path) else {
error!("Failed to open {}", meta_path.to_string_lossy());
return None;
};

@@ -5,7 +5,7 @@ mod widget;

pub use defaults::Defaults;

-pub(self) use primitive::Primitive;
+use primitive::Primitive;

use super::{
super::graphic::{self, Graphic, TexId},

@@ -675,6 +675,7 @@ impl Window {
.game_analog_button_map
.get(&AnalogButton::from((button, code)))
{
+#[allow(clippy::never_loop)]
for action in actions {
match *action {}
}

@@ -684,6 +685,7 @@ impl Window {
.menu_analog_button_map
.get(&AnalogButton::from((button, code)))
{
+#[allow(clippy::never_loop)]
for action in actions {
match *action {}
}

@@ -34,7 +34,7 @@ tracing = { workspace = true }
rand = { workspace = true }
rand_chacha = { workspace = true }
arr_macro = "0.2.1"
-packed_simd = { package = "packed_simd_2", version = "0.3.8", optional = true }
+packed_simd = { version = "0.3.9", optional = true }
rayon = { workspace = true }
serde = { workspace = true }
ron = { workspace = true }

@@ -691,7 +691,7 @@ fn main() {
let k = 32;
let sz = world.sim().get_size();

-let sites = vec![
+let sites = [
("center", sz / 2),
(
"dungeon",

@@ -11,7 +11,7 @@ fn main() {
]);
let mut middle = cons.clone();
middle.extend(vec!["tt"]);
-let vowel = vec!["o", "e", "a", "i", "u", "au", "ee", "ow", "ay", "ey", "oe"];
+let vowel = ["o", "e", "a", "i", "u", "au", "ee", "ow", "ay", "ey", "oe"];
let end = vec![
"et", "ige", "age", "ist", "en", "on", "og", "end", "ind", "ock", "een", "edge", "ist",
"ed", "est", "eed", "ast", "olt", "ey", "ean", "ead", "onk", "ink", "eon", "er", "ow",
@@ -196,10 +196,7 @@ fn palette(conn: Connection) -> Result<(), Box<dyn Error>> {
let kind = BlockKind::from_str(&row.get::<_, String>(0)?)?;
let rgb: Rgb<u8> = Rgb::new(row.get(1)?, row.get(2)?, row.get(3)?);
let count: i64 = row.get(4)?;
-block_colors
-.entry(kind)
-.or_insert_with(Vec::new)
-.push((rgb, count));
+block_colors.entry(kind).or_default().push((rgb, count));
}
for (_, v) in block_colors.iter_mut() {
v.sort_by(|a, b| b.1.cmp(&a.1));

@@ -207,7 +204,7 @@ fn palette(conn: Connection) -> Result<(), Box<dyn Error>> {

let mut palettes: HashMap<BlockKind, Vec<Rgb<u8>>> = HashMap::new();
for (kind, colors) in block_colors.iter() {
-let palette = palettes.entry(*kind).or_insert_with(Vec::new);
+let palette = palettes.entry(*kind).or_default();
if colors.len() <= 256 {
for (color, _) in colors {
palette.push(*color);
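Both hunks above use Entry::or_default() in place of or_insert_with(Vec::new), which works for any value type implementing Default. A compact stand-alone example; the map contents are made up.

use std::collections::HashMap;

fn main() {
    let mut block_colors: HashMap<&str, Vec<(u8, i64)>> = HashMap::new();
    // `or_default()` inserts `Vec::default()` (an empty Vec) on first access.
    block_colors.entry("rock").or_default().push((128u8, 42i64));
    assert_eq!(block_colors["rock"], vec![(128u8, 42i64)]);
}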
@@ -741,7 +741,7 @@ impl Civs {
1 << ((to_next_idx as u8 + 4) % 8);
}
for loc in path.iter() {
-let mut chunk = ctx.sim.get_mut(loc.0).unwrap();
+let chunk = ctx.sim.get_mut(loc.0).unwrap();
let depth = loc.1 * 250.0 - 20.0;
chunk.cave.1.alt =
chunk.alt - depth + ctx.rng.gen_range(-4.0..4.0) * (depth > 10.0) as i32 as f32;

@@ -1408,7 +1408,7 @@ impl Civs {
(1 << (to_prev_idx as u8)) | (1 << (to_next_idx as u8));
*/
if randomize_offset {
-let mut chunk = ctx.sim.get_mut(locs[1]).unwrap();
+let chunk = ctx.sim.get_mut(locs[1]).unwrap();
chunk.path.0.offset =
Vec2::new(ctx.rng.gen_range(-16..17), ctx.rng.gen_range(-16..17));
}

@@ -1569,7 +1569,9 @@ fn walk_in_all_dirs(

let adjacents = NEIGHBORS.map(|dir| a + dir);

-let Some(a_chunk) = sim.get(a) else { return potential };
+let Some(a_chunk) = sim.get(a) else {
+return potential;
+};
let mut chunks = [None; 8];
for i in 0..8 {
if loc_suitable_for_walking(sim, adjacents[i]) {

@@ -145,7 +145,9 @@ impl Tunnel {
}

fn biome_at(&self, wpos: Vec3<i32>, info: &CanvasInfo) -> Biome {
-let Some(col) = info.col_or_gen(wpos.xy()) else { return Biome::default() };
+let Some(col) = info.col_or_gen(wpos.xy()) else {
+return Biome::default();
+};

// Below the ground
let below = ((col.alt - wpos.z as f32) / 120.0).clamped(0.0, 1.0);

@@ -1036,7 +1036,6 @@ pub fn apply_caverns_to<R: Rng>(canvas: &mut Canvas, dynamic_rng: &mut R) {
}
};

-let cavern_top = cavern_top;
let mut last_kind = BlockKind::Rock;
for z in cavern_bottom - 1..cavern_top {
use SpriteKind::*;

@@ -2,7 +2,8 @@
#![allow(
clippy::option_map_unit_fn,
clippy::blocks_in_if_conditions,
-clippy::identity_op
+clippy::identity_op,
+clippy::needless_pass_by_ref_mut //until we find a better way for specs
)]
#![allow(clippy::branches_sharing_code)] // TODO: evaluate
#![deny(clippy::clone_on_ref_ptr)]

@@ -322,7 +322,7 @@ pub fn get_rivers<F: fmt::Debug + Float + Into<f64>, G: Float + Into<f64>>(
let pass_idx = (-indirection_idx) as usize;
// NOTE: Must exist since this lake had a downhill in the first place.
let neighbor_pass_idx = downhill[pass_idx] as usize/*downhill_idx*/;
-let mut lake_neighbor_pass = &mut rivers[neighbor_pass_idx];
+let lake_neighbor_pass = &mut rivers[neighbor_pass_idx];
// We definitely shouldn't have encountered this yet!
debug_assert!(lake_neighbor_pass.velocity == Vec3::zero());
// TODO: Rethink making the lake neighbor pass always a river or lake, no matter

@@ -388,7 +388,7 @@ pub fn get_rivers<F: fmt::Debug + Float + Into<f64>, G: Float + Into<f64>>(
river_spline_derivative,
)
};
-let mut lake = &mut rivers[chunk_idx];
+let lake = &mut rivers[chunk_idx];
lake.spline_derivative = river_spline_derivative;
lake.river_kind = Some(RiverKind::Lake {
neighbor_pass_pos: neighbor_pass_pos

@@ -495,7 +495,7 @@ pub fn get_rivers<F: fmt::Debug + Float + Into<f64>, G: Float + Into<f64>>(
// CONFIG.river_min_height.
let river = &rivers[chunk_idx];
let is_river = river.is_river() || width >= 0.5 && height >= CONFIG.river_min_height as f64;
-let mut downhill_river = &mut rivers[downhill_idx];
+let downhill_river = &mut rivers[downhill_idx];

if is_river {
// Provisionally make the downhill chunk a river as well.

@@ -532,7 +532,7 @@ pub fn get_rivers<F: fmt::Debug + Float + Into<f64>, G: Float + Into<f64>>(
velocity.normalize();
velocity *= velocity_magnitude;

-let mut river = &mut rivers[chunk_idx];
+let river = &mut rivers[chunk_idx];
// NOTE: Not trying to do this more cleverly because we want to keep the river's
// neighbors. TODO: Actually put something in the neighbors.
river.velocity = velocity.map(|e| e as f32);
@@ -636,31 +636,31 @@ impl m32 {
///
/// This algorithm does this in four steps:
///
-/// 1. Sort the nodes in h by height (so the lowest node by altitude is first
-/// in the list, and the highest node by altitude is last).
+/// 1. Sort the nodes in h by height (so the lowest node by altitude is first in
+/// the list, and the highest node by altitude is last).
/// 2. Iterate through the list in *reverse.* For each node, we compute its
/// drainage area as the sum of the drainage areas of its "children" nodes
/// (i.e. the nodes with directed edges to this node). To do this
-/// efficiently, we start with the "leaves" (the highest nodes), which
-/// have no neighbors higher than them, hence no directed edges to them.
-/// We add their area to themselves, and then to all neighbors that they
-/// flow into (their "ancestors" in the flow graph); currently, this just
-/// means the node immediately downhill of this node. As we go lower, we
-/// know that all our "children" already had their areas computed, which
-/// means that we can repeat the process in order to derive all the final
-/// areas.
+/// efficiently, we start with the "leaves" (the highest nodes), which have
+/// no neighbors higher than them, hence no directed edges to them. We add
+/// their area to themselves, and then to all neighbors that they flow into
+/// (their "ancestors" in the flow graph); currently, this just means the
+/// node immediately downhill of this node. As we go lower, we know that all
+/// our "children" already had their areas computed, which means that we can
+/// repeat the process in order to derive all the final areas.
/// 3. Now, iterate through the list in *order.* Whether we used the filling
/// method to compute a "filled" version of each depression, or used the lake
/// connection algorithm described in [1], each node is guaranteed to have
/// zero or one drainage edges out, representing the direction of water flow
/// for that node. For nodes i with zero drainage edges out (boundary nodes
/// and lake bottoms) we set the slope to 0 (so the change in altitude is
-/// uplift(i))
-/// For nodes with at least one drainage edge out, we take advantage of the
-/// fact that we are computing new heights in order and rewrite our equation
-/// as (letting j = downhill[i], A[i] be the computed area of point i,
-/// p(i) be the x-y position of point i,
-/// flux(i) = k * A[i]^m / ((p(i) - p(j)).magnitude()), and δt = 1):
+/// uplift(i)).
+///
+/// For nodes with at least one drainage edge out, we take
+/// advantage of the fact that we are computing new heights in order and
+/// rewrite our equation as (letting j = downhill[i], A[i] be the computed
+/// area of point i, p(i) be the x-y position of point i, flux(i) = k *
+/// A[i]^m / ((p(i) - p(j)).magnitude()), and δt = 1):
///
/// h[i](t + dt) = h[i](t) + δt * (uplift[i] + flux(i) * h[j](t + δt)) / (1 +
/// flux(i) * δt).
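The rewrapped doc comment above only changes line breaks, but the implicit update it describes is worth seeing as code. A toy, self-contained sketch of that step under the stated simplifications (δt = 1, each node i draining into node i - 1); all values are invented and the real solver operates on a full chunk graph, not a chain.

fn main() {
    // Node 0 is the boundary/outlet; node i drains into node i - 1.
    let mut h = vec![0.0f64, 10.0, 25.0, 60.0];
    let uplift = [0.0f64, 1.0, 1.0, 1.0];
    let flux = [0.0f64, 0.3, 0.2, 0.1]; // k * A[i]^m / |p(i) - p(j)|, with δt = 1
    // Process from lowest to highest, so h[j] already holds its new height.
    for i in 1..h.len() {
        let j = i - 1; // downhill[i]
        h[i] = (h[i] + uplift[i] + flux[i] * h[j]) / (1.0 + flux[i]);
    }
    println!("updated heights: {h:?}");
}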
@@ -53,8 +53,8 @@ pub fn map_edge_factor(map_size_lg: MapSizeLg, posi: usize) -> f32 {
/// At some point, we should probably contribute this back to stats-rs.
///
/// 1. [https://www.r-bloggers.com/sums-of-random-variables/][1],
-/// 2. Sadooghi-Alvandi, S., A. Nematollahi, & R. Habibi, 2009.
-/// On the Distribution of the Sum of Independent Uniform Random Variables.
+/// 2. Sadooghi-Alvandi, S., A. Nematollahi, & R. Habibi, 2009. On the
+/// Distribution of the Sum of Independent Uniform Random Variables.
/// Statistical Papers, 50, 171-175.
/// 3. [https://en.wikipedia.org/wiki/Cumulative_distribution_function][3]
///

@@ -31,7 +31,7 @@ impl<'a, R: Rng> NameGen<'a, R> {
]);
let mut middle = cons.clone();
middle.extend(vec!["tt"]);
-let vowel = vec!["o", "e", "a", "i", "u", "au", "ee", "ow", "ay", "ey", "oe"];
+let vowel = ["o", "e", "a", "i", "u", "au", "ee", "ow", "ay", "ey", "oe"];
let end = vec![
"et", "ige", "age", "ist", "en", "on", "og", "end", "ind", "ock", "een", "edge", "ist",
"ed", "est", "eed", "ast", "olt", "ey", "ean", "ead", "onk", "ink", "eon", "er", "ow",
@@ -641,7 +641,7 @@ impl Archetype for House {
% 6
{
0 => SpriteKind::HangingSign,
-1 | 2 | 3 => SpriteKind::HangingBasket,
+1..=3 => SpriteKind::HangingBasket,
4 => SpriteKind::WallSconce,
5 => SpriteKind::WallLampSmall,
_ => SpriteKind::DungeonWallDecor,
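The new clippy::manual_range_patterns lint suggests a range pattern where an alternation covers every value in between, which is the 1 | 2 | 3 to 1..=3 change above. A stand-alone sketch of the same match shape, with the sprite kinds replaced by plain strings:

fn wall_decor(n: u32) -> &'static str {
    match n % 6 {
        0 => "hanging sign",
        1..=3 => "hanging basket",
        4 => "wall sconce",
        _ => "wall lamp",
    }
}

fn main() {
    assert_eq!(wall_decor(8), "hanging basket");
}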
@@ -1414,7 +1414,7 @@ impl Land {
}
}

-closed.into_iter().chain(open.into_iter()).collect()
+closed.into_iter().chain(open).collect()
}

fn write_path(
Some files were not shown because too many files have changed in this diff.