first part of loading items from plugins

Christof Petig 2023-10-14 22:35:06 +02:00
parent 0d5017932d
commit 12ee21a289
8 changed files with 557 additions and 9 deletions

Cargo.lock (generated)
View File

@@ -6872,10 +6872,12 @@ version = "0.10.0"
dependencies = [
"assets_manager",
"dot_vox",
"hashbrown 0.13.2",
"image",
"lazy_static",
"ron 0.8.1",
"serde",
"tar",
"tracing",
"walkdir",
"wavefront",

View File

@@ -13,13 +13,17 @@ dot_vox = "5.1"
wavefront = "0.2" # TODO: Use vertex-colors branch when we have models that have them
image = { workspace = true }
tracing = { workspace = true }
tar = { version = "0.4.37", optional = true }
# asset tweak
serde = { workspace = true, optional = true }
hashbrown = { workspace = true, optional = true }
[dev-dependencies]
walkdir = "2.3.2"
[features]
hot-reloading = ["assets_manager/hot-reloading"]
asset_tweak = ["serde", "hot-reloading"]
asset_tweak = ["dep:serde", "hot-reloading"]
hashbrown = ["dep:hashbrown"]
plugins = ["dep:serde", "dep:tar"]

View File

@@ -4,7 +4,13 @@
use dot_vox::DotVoxData;
use image::DynamicImage;
use lazy_static::lazy_static;
use std::{borrow::Cow, path::PathBuf, sync::Arc};
use std::{
borrow::Cow,
collections::HashMap,
hash::{BuildHasher, Hash},
path::PathBuf,
sync::Arc,
};
pub use assets_manager::{
asset::{DirLoadable, Ron},
@@ -16,13 +22,15 @@ pub use assets_manager::{
};
mod fs;
mod plugin_cache;
#[cfg(feature = "plugins")] mod tar_source;
mod walk;
pub use plugin_cache::register_tar;
pub use walk::{walk_tree, Walk};
lazy_static! {
/// The HashMap where all loaded assets are stored in.
static ref ASSETS: AssetCache<fs::FileSystem> =
AssetCache::with_source(fs::FileSystem::new().unwrap());
static ref ASSETS: plugin_cache::CombinedCache = plugin_cache::CombinedCache::new().unwrap();
}
#[cfg(feature = "hot-reloading")]
@@ -209,6 +217,56 @@ impl Loader<ObjAsset> for ObjAssetLoader {
}
}
pub trait Concatenate {
fn concatenate(self, b: Self) -> Self;
}
impl<K: Eq + Hash, V, S: BuildHasher> Concatenate for HashMap<K, V, S> {
fn concatenate(mut self, b: Self) -> Self {
self.extend(b);
self
}
}
impl<V> Concatenate for Vec<V> {
fn concatenate(mut self, b: Self) -> Self {
self.extend(b);
self
}
}
#[cfg(feature = "hashbrown")]
impl<K: Eq + Hash, V, S: BuildHasher> Concatenate for hashbrown::HashMap<K, V, S> {
fn concatenate(mut self, b: Self) -> Self {
self.extend(b);
self
}
}
impl<T: Concatenate> Concatenate for Ron<T> {
fn concatenate(self, _b: Self) -> Self { todo!() }
}
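// Illustrative example (not part of this commit): `Concatenate` merges the
// right-hand collection into the left one, so entries contributed by plugins
// extend the base asset. The module and names below are hypothetical.
#[cfg(test)]
mod concatenate_example {
    use super::Concatenate;
    use std::collections::HashMap;

    #[test]
    fn merges_maps() {
        let base: HashMap<&str, u32> = [("sword", 1), ("bow", 2)].into_iter().collect();
        let from_plugin: HashMap<&str, u32> = [("axe", 3)].into_iter().collect();
        // `HashMap::extend` semantics: entries from `from_plugin` win on key collisions.
        let merged = base.concatenate(from_plugin);
        assert_eq!(merged.len(), 3);
    }
}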
/// This wrapper combines several RON files from multiple sources
pub struct MultiRon<T>(pub T);
impl<T: Clone> Clone for MultiRon<T> {
fn clone(&self) -> Self { Self(self.0.clone()) }
fn clone_from(&mut self, source: &Self) { self.0.clone_from(&source.0) }
}
impl<T> Compound for MultiRon<T>
where
T: for<'de> serde::Deserialize<'de> + Send + Sync + 'static + Concatenate,
{
fn load(_cache: AnyCache, id: &SharedString) -> Result<Self, BoxedError> {
ASSETS
.combine(|cache: AnyCache| <Ron<T> as Compound>::load(cache, id).map(|r| r.0))
.map(MultiRon)
}
}
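// Illustrative sketch (not part of this commit): a consumer wraps its RON spec
// type in `MultiRon` instead of `Ron` and implements `Concatenate`, so copies of
// the same asset id found in registered plugins are merged into the filesystem
// one. `ItemNames` is a hypothetical spec type.
//
// #[derive(serde::Deserialize, Clone)]
// struct ItemNames(HashMap<String, String>);
// impl Concatenate for ItemNames {
//     fn concatenate(self, b: Self) -> Self { Self(self.0.concatenate(b.0)) }
// }
// // ...and then store an AssetHandle<MultiRon<ItemNames>> instead of AssetHandle<Ron<ItemNames>>.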
/// Return path to repository root by searching 10 directories back
pub fn find_root() -> Option<PathBuf> {
std::env::current_dir().map_or(None, |path| {

View File

@@ -0,0 +1,148 @@
use std::{path::PathBuf, sync::RwLock};
use crate::Concatenate;
use super::{fs::FileSystem, tar_source::Tar};
use assets_manager::{
source::{FileContent, Source},
AnyCache, AssetCache, BoxedError,
};
use lazy_static::lazy_static;
struct PluginEntry {
path: PathBuf,
cache: AssetCache<Tar>,
}
lazy_static! {
static ref PLUGIN_LIST: RwLock<Vec<PluginEntry>> = RwLock::new(Vec::new());
}
pub fn register_tar(path: PathBuf) -> Result<(), Box<dyn std::error::Error>> {
let tar_source = Tar::from_path(&path)?;
println!("Tar {:?} {:?}", path, tar_source);
let cache = AssetCache::with_source(tar_source);
PLUGIN_LIST.write()?.push(PluginEntry { path, cache });
Ok(())
}
/// The source combining filesystem and plugins (typically used via
/// CombinedCache)
#[derive(Debug, Clone)]
pub struct CombinedSource {
fs: FileSystem,
}
impl CombinedSource {
pub fn new() -> std::io::Result<Self> {
Ok(Self {
fs: FileSystem::new()?,
})
}
}
impl CombinedSource {
fn read_multiple(&self, id: &str, ext: &str) -> Vec<(Option<usize>, FileContent<'_>)> {
let mut result = Vec::new();
if let Ok(file_entry) = self.fs.read(id, ext) {
result.push((None, file_entry));
}
if let Ok(guard) = PLUGIN_LIST.read() {
for (n, p) in guard.iter().enumerate() {
if let Ok(entry) = p.cache.raw_source().read(id, ext) {
result.push((Some(n), match entry {
FileContent::Slice(s) => FileContent::Buffer(Vec::from(s)),
FileContent::Buffer(b) => FileContent::Buffer(b),
FileContent::Owned(s) => {
FileContent::Buffer(Vec::from(s.as_ref().as_ref()))
},
}));
}
}
}
result
}
// we don't want to keep the lock, so we clone
fn plugin_path(&self, index: Option<usize>) -> Option<PathBuf> {
if let Some(index) = index {
PLUGIN_LIST
.read()
.ok()
.and_then(|p| p.get(index).map(|p| p.path.clone()))
} else {
None
}
}
}
impl Source for CombinedSource {
fn read(&self, id: &str, ext: &str) -> std::io::Result<FileContent<'_>> {
// we could shortcut on fs if we don't want to check for conflicts
let mut entries = self.read_multiple(id, ext);
if entries.is_empty() {
Err(std::io::ErrorKind::NotFound.into())
} else {
if entries.len() > 1 {
tracing::error!(
"Duplicate asset {id} in read, plugins {:?} + {:?}",
self.plugin_path(entries[0].0),
self.plugin_path(entries[1].0)
);
}
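// The filesystem entry, if present, was pushed first by `read_multiple`, so it
// takes precedence over plugin-provided files here.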
Ok(entries.swap_remove(0).1)
}
}
fn read_dir(
&self,
id: &str,
f: &mut dyn FnMut(assets_manager::source::DirEntry),
) -> std::io::Result<()> {
// TODO: we should combine the sources
self.fs.read_dir(id, f)
}
fn exists(&self, entry: assets_manager::source::DirEntry) -> bool {
self.fs.exists(entry)
|| PLUGIN_LIST
.read()
.map(|p| p.iter().any(|p| p.cache.raw_source().exists(entry)))
.unwrap_or_default()
}
fn make_source(&self) -> Option<Box<dyn Source + Send>> { None }
}
/// A cache combining filesystem and plugin assets
pub struct CombinedCache(AssetCache<CombinedSource>);
impl CombinedCache {
pub fn new() -> std::io::Result<Self> {
CombinedSource::new().map(|s| Self(AssetCache::with_source(s)))
}
/// Combine objects from filesystem and plugins
pub fn combine<T: Concatenate>(
&self,
load_from: impl Fn(AnyCache) -> Result<T, BoxedError>,
) -> Result<T, BoxedError> {
let mut result = load_from(self.as_any_cache());
for i in PLUGIN_LIST.read().unwrap().iter() {
if let Ok(b) = load_from(i.cache.as_any_cache()) {
result = if let Ok(a) = result {
Ok(a.concatenate(b))
} else {
Ok(b)
};
}
}
result
}
}
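// Illustrative note (not part of this commit): `combine` loads from the
// filesystem cache first and then folds in each registered plugin via
// `Concatenate`; an asset that only exists in a plugin still loads, because a
// later `Ok` replaces an earlier `Err`. `MultiRon` in lib.rs drives it as:
//
//     ASSETS.combine(|cache: AnyCache| <Ron<T> as Compound>::load(cache, id).map(|r| r.0))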
impl std::ops::Deref for CombinedCache {
type Target = AssetCache<CombinedSource>;
fn deref(&self) -> &Self::Target { &self.0 }
}

View File

@@ -0,0 +1,262 @@
use assets_manager::source::{DirEntry, FileContent, Source};
use hashbrown::HashMap;
use tar::EntryType;
use std::{
fmt,
fs::File,
hash, io,
os::unix::prelude::FileExt,
path::{self, Path, PathBuf},
};
// derived from the zip source from assets_manager
#[inline]
pub(crate) fn extension_of(path: &Path) -> Option<&str> {
match path.extension() {
Some(ext) => ext.to_str(),
None => Some(""),
}
}
#[derive(Clone, Hash, PartialEq, Eq)]
struct FileDesc(String, String);
/// This hack enables us to use a `(&str, &str)` as a key for a HashMap without
/// allocating a `FileDesc`
trait FileKey {
fn id(&self) -> &str;
fn ext(&self) -> &str;
}
impl FileKey for FileDesc {
fn id(&self) -> &str { &self.0 }
fn ext(&self) -> &str { &self.1 }
}
impl FileKey for (&'_ str, &'_ str) {
fn id(&self) -> &str { self.0 }
fn ext(&self) -> &str { self.1 }
}
impl<'a> std::borrow::Borrow<dyn FileKey + 'a> for FileDesc {
fn borrow(&self) -> &(dyn FileKey + 'a) { self }
}
impl PartialEq for dyn FileKey + '_ {
fn eq(&self, other: &Self) -> bool { self.id() == other.id() && self.ext() == other.ext() }
}
impl Eq for dyn FileKey + '_ {}
impl hash::Hash for dyn FileKey + '_ {
fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
self.id().hash(hasher);
self.ext().hash(hasher);
}
}
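// Illustrative note (not part of this commit): thanks to the `Borrow` impl
// above, lookups can cast a tuple to the trait object instead of allocating a
// `FileDesc`, e.g. `files.get(&("common.items.sword", "ron") as &dyn FileKey)`
// (the asset id here is made up).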
impl fmt::Debug for FileDesc {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("FileDesc")
.field("id", &self.0)
.field("ext", &self.1)
.finish()
}
}
/// An entry in an archive directory.
#[derive(Debug)]
enum OwnedEntry {
File(FileDesc),
// Dir(String),
}
impl OwnedEntry {
fn as_dir_entry(&self) -> DirEntry {
match self {
OwnedEntry::File(FileDesc(desc0, desc1)) => DirEntry::File(desc0, desc1),
// OwnedEntry::Dir(id) => DirEntry::Directory(id),
}
}
}
/// Build ids from components.
///
/// Using this makes it easy to reuse buffers when building several ids in a
/// row, and thus avoids repeated allocations.
#[derive(Default)]
struct IdBuilder {
segments: Vec<String>,
len: usize,
}
impl IdBuilder {
/// Pushes a segment into the builder.
#[inline]
fn push(&mut self, s: &str) {
match self.segments.get_mut(self.len) {
Some(seg) => {
seg.clear();
seg.push_str(s);
},
None => self.segments.push(s.to_owned()),
}
self.len += 1;
}
/// Joins segments to build an id.
#[inline]
fn join(&self) -> String { self.segments[..self.len].join(".") }
/// Resets the builder without freeing buffers.
#[inline]
fn reset(&mut self) { self.len = 0; }
}
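// Illustrative example (not part of this commit): reusing one `IdBuilder` to
// turn path segments into a dotted asset id. The module and id are hypothetical.
#[cfg(test)]
mod id_builder_example {
    use super::IdBuilder;

    #[test]
    fn joins_segments() {
        let mut builder = IdBuilder::default();
        for segment in ["common", "items", "sword"] {
            builder.push(segment);
        }
        assert_eq!(builder.join(), "common.items.sword");
        // `reset` keeps the allocated buffers around for the next id.
        builder.reset();
        assert_eq!(builder.join(), "");
    }
}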
/// Register a file of an archive in maps.
fn register_file(
path: &Path,
position: usize,
length: usize,
files: &mut HashMap<FileDesc, (usize, usize)>,
dirs: &mut HashMap<String, Vec<OwnedEntry>>,
id_builder: &mut IdBuilder,
) {
id_builder.reset();
// Parse the path and register it.
// The closure is used as a cheap `try` block.
let ok = (|| {
// Fill `id_builder` from the parent's components
let parent = path.parent()?;
for comp in parent.components() {
match comp {
path::Component::Normal(s) => {
let segment = s.to_str()?;
if segment.contains('.') {
return None;
}
id_builder.push(segment);
},
_ => return None,
}
}
// Build the ids of the file and its parent.
let parent_id = id_builder.join();
id_builder.push(path.file_stem()?.to_str()?);
let id = id_builder.join();
// Register the file in the maps.
let ext = extension_of(path)?.to_owned();
let desc = FileDesc(id, ext);
files.insert(desc.clone(), (position, length));
let entry = OwnedEntry::File(desc);
dirs.entry(parent_id).or_default().push(entry);
Some(())
})()
.is_some();
if !ok {
tracing::warn!("Unsupported path in tar archive: {path:?}");
}
}
enum Backend {
File(PathBuf),
// Buffer(&'static [u8]),
}
impl Backend {
fn read(&self, pos: usize, len: usize) -> std::io::Result<Vec<u8>> {
match self {
Backend::File(path) => File::open(path).and_then(|file| {
let mut result = vec![0; len];
file.read_at(result.as_mut_slice(), pos as u64)
.map(move |_bytes| result)
}),
// Backend::Buffer(_) => todo!(),
}
}
}
pub struct Tar {
files: HashMap<FileDesc, (usize, usize)>,
dirs: HashMap<String, Vec<OwnedEntry>>,
backend: Backend,
}
impl Tar {
/// Creates a `Tar` from a file
pub fn from_path(path: &Path) -> io::Result<Tar> {
let file = File::open(path)?;
let mut tar = tar::Archive::new(file);
let contents = tar
.entries()
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
let mut files = HashMap::with_capacity(contents.size_hint().0);
let mut dirs = HashMap::new();
let mut id_builder = IdBuilder::default();
for e in contents.flatten() {
if matches!(e.header().entry_type(), EntryType::Regular) {
register_file(
e.path().map_err(io::Error::other)?.as_ref(),
e.raw_file_position() as usize,
e.size() as usize,
&mut files,
&mut dirs,
&mut id_builder,
);
}
}
Ok(Tar {
files,
dirs,
backend: Backend::File(path.to_path_buf()),
})
}
}
impl Source for Tar {
fn read(&self, id: &str, ext: &str) -> io::Result<FileContent> {
let key: &dyn FileKey = &(id, ext);
let id = *self
.files
.get(key)
.or_else(|| {
// also accept assets within the assets dir for now
let with_prefix = "assets.".to_string() + id;
let prefixed_key: &dyn FileKey = &(with_prefix.as_str(), ext);
self.files.get(prefixed_key)
})
.ok_or(io::ErrorKind::NotFound)?;
self.backend.read(id.0, id.1).map(FileContent::Buffer)
}
fn read_dir(&self, id: &str, f: &mut dyn FnMut(DirEntry)) -> io::Result<()> {
let dir = self.dirs.get(id).ok_or(io::ErrorKind::NotFound)?;
dir.iter().map(OwnedEntry::as_dir_entry).for_each(f);
Ok(())
}
fn exists(&self, entry: DirEntry) -> bool {
match entry {
DirEntry::File(id, ext) => self.files.contains_key(&(id, ext) as &dyn FileKey),
DirEntry::Directory(id) => self.dirs.contains_key(id),
}
}
}
impl fmt::Debug for Tar {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Tar")
.field("files", &self.files)
.field("dirs", &self.dirs)
.finish()
}
}

View File

@@ -196,8 +196,14 @@ impl PluginMgr {
.unwrap_or(false)
{
info!("Loading plugin at {:?}", entry.path());
Plugin::from_reader(fs::File::open(entry.path()).map_err(PluginError::Io)?)
.map(Some)
Plugin::from_reader(fs::File::open(entry.path()).map_err(PluginError::Io)?).map(
|o| {
if let Err(e) = common::assets::register_tar(entry.path()) {
error!("Plugin {:?} tar error {e:?}", entry.path());
}
Some(o)
},
)
} else {
Ok(None)
}

View File

@@ -27,7 +27,7 @@ singleplayer = ["server"]
simd = ["vek/platform_intrinsics"]
tracy = ["common-frontend/tracy", "client/tracy"]
tracy-memory = ["tracy"] # enables heap profiling with tracy
plugins = ["client/plugins"]
plugins = ["client/plugins", "common-assets/plugins", "common-assets/hashbrown"]
egui-ui = ["voxygen-egui", "egui", "egui_wgpu_backend", "egui_winit_platform"]
shaderc-from-source = ["shaderc/build-from-source"]
discord = ["discord-sdk"]

View File

@@ -2,7 +2,7 @@ use super::cache::{
FigureKey, FigureModelEntryFuture, ModelEntryFuture, TerrainModelEntryFuture, ToolKey,
};
use common::{
assets::{self, AssetExt, AssetHandle, DotVoxAsset, ReloadWatcher, Ron},
assets::{self, AssetExt, AssetHandle, Concatenate, DotVoxAsset, MultiRon, ReloadWatcher},
comp::{
arthropod::{self, BodyType as ABodyType, Species as ASpecies},
biped_large::{self, BodyType as BLBodyType, Species as BLSpecies},
@@ -143,7 +143,7 @@ macro_rules! make_vox_spec {
) => {
#[derive(Clone)]
pub struct $Spec {
$( $field: AssetHandle<Ron<$ty>>, )*
$( $field: AssetHandle<MultiRon<$ty>>, )*
}
impl assets::Compound for $Spec {
@@ -178,6 +178,13 @@ macro_rules! make_vox_spec {
}
}
}
macro_rules! concatenate_tuple {
($name:ty) => {
impl Concatenate for $name {
fn concatenate(self, b: Self) -> Self { Self(self.0.concatenate(b.0)) }
}
};
}
// All offsets should be relative to an initial origin that doesn't change when
// combining segments
@@ -259,6 +266,10 @@ impl HumColorSpec {
}
}
impl Concatenate for HumColorSpec {
fn concatenate(self, _b: Self) -> Self { todo!("Can't concatenate HumColorSpec") }
}
// All reliant on humanoid::Species and humanoid::BodyType
#[derive(Deserialize)]
struct HumHeadSubSpec {
@@ -364,6 +375,7 @@ impl HumHeadSpec {
)
}
}
concatenate_tuple!(HumHeadSpec);
// Armor aspects should be in the same order, top to bottom.
// These seem overly split up, but we wanted to keep the armor separated
@@ -376,32 +388,53 @@ where
default: S,
map: HashMap<K, S>,
}
impl<K: Hash + Eq, S> Concatenate for ArmorVoxSpecMap<K, S> {
fn concatenate(self, b: Self) -> Self {
Self {
default: self.default,
map: self.map.concatenate(b.map),
}
}
}
#[derive(Deserialize)]
struct HumArmorShoulderSpec(ArmorVoxSpecMap<String, SidedArmorVoxSpec>);
concatenate_tuple!(HumArmorShoulderSpec);
#[derive(Deserialize)]
struct HumArmorChestSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(HumArmorChestSpec);
#[derive(Deserialize)]
struct HumArmorHandSpec(ArmorVoxSpecMap<String, SidedArmorVoxSpec>);
concatenate_tuple!(HumArmorHandSpec);
#[derive(Deserialize)]
struct HumArmorBeltSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(HumArmorBeltSpec);
#[derive(Deserialize)]
struct HumArmorBackSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(HumArmorBackSpec);
#[derive(Deserialize)]
struct HumArmorPantsSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(HumArmorPantsSpec);
#[derive(Deserialize)]
struct HumArmorFootSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(HumArmorFootSpec);
#[derive(Deserialize)]
struct HumMainWeaponSpec(HashMap<ToolKey, ArmorVoxSpec>);
concatenate_tuple!(HumMainWeaponSpec);
#[derive(Deserialize)]
struct HumModularComponentSpec(HashMap<String, ModularComponentSpec>);
concatenate_tuple!(HumModularComponentSpec);
#[derive(Deserialize)]
struct HumArmorLanternSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(HumArmorLanternSpec);
#[derive(Deserialize)]
struct HumArmorGliderSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(HumArmorGliderSpec);
#[derive(Deserialize)]
struct HumArmorHeadSpec(ArmorVoxSpecMap<(Species, BodyType, String), ArmorVoxSpec>);
concatenate_tuple!(HumArmorHeadSpec);
#[derive(Deserialize)]
struct HumArmorTabardSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(HumArmorTabardSpec);
make_vox_spec!(
Body,
@@ -1072,6 +1105,7 @@ fn mesh_hold() -> BoneMeshes {
//////
#[derive(Deserialize)]
struct QuadrupedSmallCentralSpec(HashMap<(QSSpecies, QSBodyType), SidedQSCentralVoxSpec>);
concatenate_tuple!(QuadrupedSmallCentralSpec);
#[derive(Deserialize)]
struct SidedQSCentralVoxSpec {
@@ -1089,6 +1123,7 @@ struct QuadrupedSmallCentralSubSpec {
#[derive(Deserialize)]
struct QuadrupedSmallLateralSpec(HashMap<(QSSpecies, QSBodyType), SidedQSLateralVoxSpec>);
concatenate_tuple!(QuadrupedSmallLateralSpec);
#[derive(Deserialize)]
struct SidedQSLateralVoxSpec {
@@ -1287,6 +1322,7 @@ impl QuadrupedSmallLateralSpec {
//////
#[derive(Deserialize)]
struct QuadrupedMediumCentralSpec(HashMap<(QMSpecies, QMBodyType), SidedQMCentralVoxSpec>);
concatenate_tuple!(QuadrupedMediumCentralSpec);
#[derive(Deserialize)]
struct SidedQMCentralVoxSpec {
@@ -1308,6 +1344,7 @@ struct QuadrupedMediumCentralSubSpec {
#[derive(Deserialize)]
struct QuadrupedMediumLateralSpec(HashMap<(QMSpecies, QMBodyType), SidedQMLateralVoxSpec>);
concatenate_tuple!(QuadrupedMediumLateralSpec);
#[derive(Deserialize)]
struct SidedQMLateralVoxSpec {
leg_fl: QuadrupedMediumLateralSubSpec,
@@ -1663,6 +1700,7 @@ impl QuadrupedMediumLateralSpec {
//////
#[derive(Deserialize)]
struct BirdMediumCentralSpec(HashMap<(BMSpecies, BMBodyType), SidedBMCentralVoxSpec>);
concatenate_tuple!(BirdMediumCentralSpec);
#[derive(Deserialize)]
struct SidedBMCentralVoxSpec {
@@ -1680,6 +1718,7 @@ struct BirdMediumCentralSubSpec {
#[derive(Deserialize)]
struct BirdMediumLateralSpec(HashMap<(BMSpecies, BMBodyType), SidedBMLateralVoxSpec>);
concatenate_tuple!(BirdMediumLateralSpec);
#[derive(Deserialize)]
struct SidedBMLateralVoxSpec {
@@ -1917,6 +1956,7 @@ impl BirdMediumLateralSpec {
//////
#[derive(Deserialize)]
struct TheropodCentralSpec(HashMap<(TSpecies, TBodyType), SidedTCentralVoxSpec>);
concatenate_tuple!(TheropodCentralSpec);
#[derive(Deserialize)]
struct SidedTCentralVoxSpec {
@@ -1937,6 +1977,7 @@ struct TheropodCentralSubSpec {
}
#[derive(Deserialize)]
struct TheropodLateralSpec(HashMap<(TSpecies, TBodyType), SidedTLateralVoxSpec>);
concatenate_tuple!(TheropodLateralSpec);
#[derive(Deserialize)]
struct SidedTLateralVoxSpec {
@@ -2247,6 +2288,7 @@ impl TheropodLateralSpec {
//////
#[derive(Deserialize)]
struct ArthropodCentralSpec(HashMap<(ASpecies, ABodyType), SidedACentralVoxSpec>);
concatenate_tuple!(ArthropodCentralSpec);
#[derive(Deserialize)]
struct SidedACentralVoxSpec {
@@ -2262,6 +2304,7 @@ struct ArthropodCentralSubSpec {
}
#[derive(Deserialize)]
struct ArthropodLateralSpec(HashMap<(ASpecies, ABodyType), SidedALateralVoxSpec>);
concatenate_tuple!(ArthropodLateralSpec);
#[derive(Deserialize)]
struct SidedALateralVoxSpec {
@@ -2647,6 +2690,7 @@ impl ArthropodLateralSpec {
//////
#[derive(Deserialize)]
struct FishMediumCentralSpec(HashMap<(FMSpecies, FMBodyType), SidedFMCentralVoxSpec>);
concatenate_tuple!(FishMediumCentralSpec);
#[derive(Deserialize)]
struct SidedFMCentralVoxSpec {
@@ -2665,6 +2709,7 @@ struct FishMediumCentralSubSpec {
}
#[derive(Deserialize)]
struct FishMediumLateralSpec(HashMap<(FMSpecies, FMBodyType), SidedFMLateralVoxSpec>);
concatenate_tuple!(FishMediumLateralSpec);
#[derive(Deserialize)]
struct SidedFMLateralVoxSpec {
fin_l: FishMediumLateralSubSpec,
@@ -2853,6 +2898,7 @@ impl FishMediumLateralSpec {
//////
#[derive(Deserialize)]
struct FishSmallCentralSpec(HashMap<(FSSpecies, FSBodyType), SidedFSCentralVoxSpec>);
concatenate_tuple!(FishSmallCentralSpec);
#[derive(Deserialize)]
struct SidedFSCentralVoxSpec {
@@ -2868,6 +2914,7 @@ struct FishSmallCentralSubSpec {
}
#[derive(Deserialize)]
struct FishSmallLateralSpec(HashMap<(FSSpecies, FSBodyType), SidedFSLateralVoxSpec>);
concatenate_tuple!(FishSmallLateralSpec);
#[derive(Deserialize)]
struct SidedFSLateralVoxSpec {
fin_l: FishSmallLateralSubSpec,
@@ -2998,18 +3045,25 @@ impl FishSmallLateralSpec {
#[derive(Deserialize)]
struct BipedSmallWeaponSpec(HashMap<ToolKey, ArmorVoxSpec>);
concatenate_tuple!(BipedSmallWeaponSpec);
#[derive(Deserialize)]
struct BipedSmallArmorHeadSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(BipedSmallArmorHeadSpec);
#[derive(Deserialize)]
struct BipedSmallArmorHandSpec(ArmorVoxSpecMap<String, SidedArmorVoxSpec>);
concatenate_tuple!(BipedSmallArmorHandSpec);
#[derive(Deserialize)]
struct BipedSmallArmorFootSpec(ArmorVoxSpecMap<String, SidedArmorVoxSpec>);
concatenate_tuple!(BipedSmallArmorFootSpec);
#[derive(Deserialize)]
struct BipedSmallArmorChestSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(BipedSmallArmorChestSpec);
#[derive(Deserialize)]
struct BipedSmallArmorPantsSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(BipedSmallArmorPantsSpec);
#[derive(Deserialize)]
struct BipedSmallArmorTailSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
concatenate_tuple!(BipedSmallArmorTailSpec);
make_vox_spec!(
biped_small::Body,
struct BipedSmallSpec {
@@ -3273,6 +3327,7 @@ impl BipedSmallWeaponSpec {
//////
#[derive(Deserialize)]
struct DragonCentralSpec(HashMap<(DSpecies, DBodyType), SidedDCentralVoxSpec>);
concatenate_tuple!(DragonCentralSpec);
#[derive(Deserialize)]
struct SidedDCentralVoxSpec {
@@ -3294,6 +3349,7 @@ struct DragonCentralSubSpec {
#[derive(Deserialize)]
struct DragonLateralSpec(HashMap<(DSpecies, DBodyType), SidedDLateralVoxSpec>);
concatenate_tuple!(DragonLateralSpec);
#[derive(Deserialize)]
struct SidedDLateralVoxSpec {
@@ -3644,6 +3700,7 @@ impl DragonLateralSpec {
//////
#[derive(Deserialize)]
struct BirdLargeCentralSpec(HashMap<(BLASpecies, BLABodyType), SidedBLACentralVoxSpec>);
concatenate_tuple!(BirdLargeCentralSpec);
#[derive(Deserialize)]
struct SidedBLACentralVoxSpec {
@@ -3664,6 +3721,7 @@ struct BirdLargeCentralSubSpec {
#[derive(Deserialize)]
struct BirdLargeLateralSpec(HashMap<(BLASpecies, BLABodyType), SidedBLALateralVoxSpec>);
concatenate_tuple!(BirdLargeLateralSpec);
#[derive(Deserialize)]
struct SidedBLALateralVoxSpec {
@@ -4047,6 +4105,7 @@ impl BirdLargeLateralSpec {
//////
#[derive(Deserialize)]
struct BipedLargeCentralSpec(HashMap<(BLSpecies, BLBodyType), SidedBLCentralVoxSpec>);
concatenate_tuple!(BipedLargeCentralSpec);
#[derive(Deserialize)]
struct SidedBLCentralVoxSpec {
@@ -4066,6 +4125,7 @@ struct BipedLargeCentralSubSpec {
#[derive(Deserialize)]
struct BipedLargeLateralSpec(HashMap<(BLSpecies, BLBodyType), SidedBLLateralVoxSpec>);
concatenate_tuple!(BipedLargeLateralSpec);
#[derive(Deserialize)]
struct SidedBLLateralVoxSpec {
@@ -4087,8 +4147,10 @@ struct BipedLargeLateralSubSpec {
}
#[derive(Deserialize)]
struct BipedLargeMainSpec(HashMap<ToolKey, ArmorVoxSpec>);
concatenate_tuple!(BipedLargeMainSpec);
#[derive(Deserialize)]
struct BipedLargeSecondSpec(HashMap<ToolKey, ArmorVoxSpec>);
concatenate_tuple!(BipedLargeSecondSpec);
make_vox_spec!(
biped_large::Body,
struct BipedLargeSpec {
@@ -4466,6 +4528,7 @@ impl BipedLargeSecondSpec {
//////
#[derive(Deserialize)]
struct GolemCentralSpec(HashMap<(GSpecies, GBodyType), SidedGCentralVoxSpec>);
concatenate_tuple!(GolemCentralSpec);
#[derive(Deserialize)]
struct SidedGCentralVoxSpec {
@@ -4484,6 +4547,7 @@ struct GolemCentralSubSpec {
#[derive(Deserialize)]
struct GolemLateralSpec(HashMap<(GSpecies, GBodyType), SidedGLateralVoxSpec>);
concatenate_tuple!(GolemLateralSpec);
#[derive(Deserialize)]
struct SidedGLateralVoxSpec {
@@ -4776,6 +4840,7 @@ impl GolemLateralSpec {
//////
#[derive(Deserialize)]
struct QuadrupedLowCentralSpec(HashMap<(QLSpecies, QLBodyType), SidedQLCentralVoxSpec>);
concatenate_tuple!(QuadrupedLowCentralSpec);
#[derive(Deserialize)]
struct SidedQLCentralVoxSpec {
@@ -4796,6 +4861,7 @@ struct QuadrupedLowCentralSubSpec {
#[derive(Deserialize)]
struct QuadrupedLowLateralSpec(HashMap<(QLSpecies, QLBodyType), SidedQLLateralVoxSpec>);
concatenate_tuple!(QuadrupedLowLateralSpec);
#[derive(Deserialize)]
struct SidedQLLateralVoxSpec {
front_left: QuadrupedLowLateralSubSpec,
@@ -5125,6 +5191,7 @@ impl ObjectCentralSpec {
(central, Vec3::from(spec.bone1.offset))
}
}
concatenate_tuple!(ObjectCentralSpec);
struct ModelWithOptionalIndex(String, u32);
@@ -5162,6 +5229,7 @@ impl<'de> Deserialize<'de> for ModelWithOptionalIndex {
#[derive(Deserialize)]
struct ItemDropCentralSpec(HashMap<ItemKey, ModelWithOptionalIndex>);
concatenate_tuple!(ItemDropCentralSpec);
make_vox_spec!(
item_drop::Body,