mirror of
https://gitlab.com/veloren/veloren.git
synced 2024-08-30 18:12:32 +00:00
Merge branch 'christof/items_in_plugins' into 'master'
Load voxel, item description and body specs from plugin archives See merge request veloren/veloren!3658
This commit is contained in:
commit
bbed1d85e8
@ -27,6 +27,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
- New arena building in desert cities, suitable for PvP; NPCs like to watch the fights too
|
||||
- The loading screen now displays status updates for singleplayer server and client initialization progress
|
||||
- New Frost Gigas attacks & AI
|
||||
- Allow plugins to add weapon and armor items
|
||||
|
||||
### Changed
|
||||
|
||||
|
2
Cargo.lock
generated
2
Cargo.lock
generated
@ -6872,10 +6872,12 @@ version = "0.10.0"
|
||||
dependencies = [
|
||||
"assets_manager",
|
||||
"dot_vox",
|
||||
"hashbrown 0.13.2",
|
||||
"image",
|
||||
"lazy_static",
|
||||
"ron 0.8.1",
|
||||
"serde",
|
||||
"tar",
|
||||
"tracing",
|
||||
"walkdir",
|
||||
"wavefront",
|
||||
|
@ -13,13 +13,16 @@ dot_vox = "5.1"
|
||||
wavefront = "0.2" # TODO: Use vertex-colors branch when we have models that have them
|
||||
image = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tar = { version = "0.4.37", optional = true }
|
||||
|
||||
# asset tweak
|
||||
serde = { workspace = true, optional = true }
|
||||
hashbrown = { workspace = true, optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
walkdir = "2.3.2"
|
||||
|
||||
[features]
|
||||
hot-reloading = ["assets_manager/hot-reloading"]
|
||||
asset_tweak = ["serde", "hot-reloading"]
|
||||
asset_tweak = ["dep:serde", "hot-reloading"]
|
||||
plugins = ["dep:serde", "dep:tar", "dep:hashbrown"]
|
||||
|
@ -4,7 +4,13 @@
|
||||
use dot_vox::DotVoxData;
|
||||
use image::DynamicImage;
|
||||
use lazy_static::lazy_static;
|
||||
use std::{borrow::Cow, path::PathBuf, sync::Arc};
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
collections::HashMap,
|
||||
hash::{BuildHasher, Hash},
|
||||
path::PathBuf,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
pub use assets_manager::{
|
||||
asset::{DirLoadable, Ron},
|
||||
@ -16,18 +22,30 @@ pub use assets_manager::{
|
||||
};
|
||||
|
||||
mod fs;
|
||||
#[cfg(feature = "plugins")] mod plugin_cache;
|
||||
#[cfg(feature = "plugins")] mod tar_source;
|
||||
mod walk;
|
||||
pub use walk::{walk_tree, Walk};
|
||||
|
||||
#[cfg(feature = "plugins")]
|
||||
lazy_static! {
|
||||
/// The HashMap where all loaded assets are stored in.
|
||||
static ref ASSETS: AssetCache<fs::FileSystem> =
|
||||
/// The HashMap where all loaded assets are stored in.
|
||||
static ref ASSETS: plugin_cache::CombinedCache = plugin_cache::CombinedCache::new().unwrap();
|
||||
}
|
||||
#[cfg(not(feature = "plugins"))]
|
||||
lazy_static! {
|
||||
/// The HashMap where all loaded assets are stored in.
|
||||
static ref ASSETS: AssetCache<fs::FileSystem> =
|
||||
AssetCache::with_source(fs::FileSystem::new().unwrap());
|
||||
}
|
||||
|
||||
#[cfg(feature = "hot-reloading")]
|
||||
pub fn start_hot_reloading() { ASSETS.enhance_hot_reloading(); }
|
||||
|
||||
// register a new plugin
|
||||
#[cfg(feature = "plugins")]
|
||||
pub fn register_tar(path: PathBuf) -> std::io::Result<()> { ASSETS.register_tar(path) }
|
||||
|
||||
pub type AssetHandle<T> = assets_manager::Handle<'static, T>;
|
||||
pub type AssetGuard<T> = assets_manager::AssetGuard<'static, T>;
|
||||
pub type AssetDirHandle<T> = assets_manager::DirHandle<'static, T>;
|
||||
@ -209,6 +227,57 @@ impl Loader<ObjAsset> for ObjAssetLoader {
|
||||
}
|
||||
}
|
||||
|
||||
/// Merging of values of the same type loaded from several sources
/// (filesystem and plugin archives) into a single combined value.
pub trait Concatenate {
    fn concatenate(self, b: Self) -> Self;
}

impl<K: Eq + Hash, V, S: BuildHasher> Concatenate for HashMap<K, V, S> {
    fn concatenate(mut self, b: Self) -> Self {
        // Entries of `b` overwrite entries of `self` that share a key.
        for (key, value) in b {
            self.insert(key, value);
        }
        self
    }
}

impl<V> Concatenate for Vec<V> {
    fn concatenate(mut self, b: Self) -> Self {
        // Append the second vector's elements in order.
        for element in b {
            self.push(element);
        }
        self
    }
}
|
||||
|
||||
/// Plugin data uses `hashbrown` maps; merge them exactly like the std map:
/// entries from `b` overwrite entries of `self` sharing a key.
#[cfg(feature = "plugins")]
impl<K: Eq + Hash, V, S: BuildHasher> Concatenate for hashbrown::HashMap<K, V, S> {
    fn concatenate(mut self, b: Self) -> Self {
        self.extend(b.into_iter());
        self
    }
}
|
||||
|
||||
impl<T: Concatenate> Concatenate for Ron<T> {
    /// `Ron` wraps a single-source load; combined multi-source loads go
    /// through `MultiRon` instead, so concatenating `Ron` values is
    /// intentionally unsupported.
    fn concatenate(self, _b: Self) -> Self {
        // A bare `todo!()` gives no hint to whoever hits this; explain the fix.
        unimplemented!("Concatenate is not supported for Ron<T>; use MultiRon to combine assets")
    }
}
|
||||
|
||||
/// This wrapper combines several RON files from multiple sources
pub struct MultiRon<T>(pub T);

impl<T: Clone> Clone for MultiRon<T> {
    /// Delegate cloning to the wrapped value.
    fn clone(&self) -> Self { MultiRon(self.0.clone()) }

    /// Reuse the wrapped value's existing allocation where possible.
    fn clone_from(&mut self, source: &Self) { self.0.clone_from(&source.0) }
}
|
||||
|
||||
#[cfg(feature = "plugins")]
|
||||
impl<T> Compound for MultiRon<T>
|
||||
where
|
||||
T: for<'de> serde::Deserialize<'de> + Send + Sync + 'static + Concatenate,
|
||||
{
|
||||
fn load(_cache: AnyCache, id: &SharedString) -> Result<Self, BoxedError> {
|
||||
ASSETS
|
||||
.combine(|cache: AnyCache| <Ron<T> as Compound>::load(cache, id).map(|r| r.0))
|
||||
.map(MultiRon)
|
||||
}
|
||||
}
|
||||
|
||||
/// Return path to repository root by searching 10 directories back
|
||||
pub fn find_root() -> Option<PathBuf> {
|
||||
std::env::current_dir().map_or(None, |path| {
|
||||
|
200
common/assets/src/plugin_cache.rs
Normal file
200
common/assets/src/plugin_cache.rs
Normal file
@ -0,0 +1,200 @@
|
||||
use std::{path::PathBuf, sync::RwLock};
|
||||
|
||||
use crate::Concatenate;
|
||||
|
||||
use super::{fs::FileSystem, tar_source::Tar, ASSETS_PATH};
|
||||
use assets_manager::{
|
||||
hot_reloading::{DynUpdateSender, EventSender},
|
||||
source::{FileContent, Source},
|
||||
AnyCache, AssetCache, BoxedError,
|
||||
};
|
||||
|
||||
struct PluginEntry {
|
||||
path: PathBuf,
|
||||
cache: AssetCache<Tar>,
|
||||
}
|
||||
|
||||
/// The location of this asset
|
||||
enum AssetSource {
|
||||
FileSystem,
|
||||
Plugin { index: usize },
|
||||
}
|
||||
|
||||
struct SourceAndContents<'a>(AssetSource, FileContent<'a>);
|
||||
|
||||
/// This source combines assets loaded from the filesystem and from plugins.
|
||||
/// It is typically used via the CombinedCache type.
|
||||
///
|
||||
/// A load will search through all sources and warn about unhandled duplicates.
|
||||
pub struct CombinedSource {
|
||||
fs: AssetCache<FileSystem>,
|
||||
plugin_list: RwLock<Vec<PluginEntry>>,
|
||||
}
|
||||
|
||||
impl CombinedSource {
|
||||
pub fn new() -> std::io::Result<Self> {
|
||||
Ok(Self {
|
||||
fs: AssetCache::with_source(FileSystem::new()?),
|
||||
plugin_list: RwLock::new(Vec::new()),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl CombinedSource {
|
||||
/// Look for an asset in all known sources
|
||||
fn read_multiple(&self, id: &str, ext: &str) -> Vec<SourceAndContents<'_>> {
|
||||
let mut result = Vec::new();
|
||||
if let Ok(file_entry) = self.fs.raw_source().read(id, ext) {
|
||||
result.push(SourceAndContents(AssetSource::FileSystem, file_entry));
|
||||
}
|
||||
for (n, p) in self.plugin_list.read().unwrap().iter().enumerate() {
|
||||
if let Ok(entry) = p.cache.raw_source().read(id, ext) {
|
||||
// the data is behind an RwLockReadGuard, so own it for returning
|
||||
result.push(SourceAndContents(
|
||||
AssetSource::Plugin { index: n },
|
||||
match entry {
|
||||
FileContent::Slice(s) => FileContent::Buffer(Vec::from(s)),
|
||||
FileContent::Buffer(b) => FileContent::Buffer(b),
|
||||
FileContent::Owned(s) => {
|
||||
FileContent::Buffer(Vec::from(s.as_ref().as_ref()))
|
||||
},
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
/// Return the path of a source
|
||||
fn plugin_path(&self, index: &AssetSource) -> Option<PathBuf> {
|
||||
match index {
|
||||
AssetSource::FileSystem => Some(ASSETS_PATH.clone()),
|
||||
AssetSource::Plugin { index } => self.plugin_list
|
||||
.read()
|
||||
.unwrap()
|
||||
.get(*index)
|
||||
// We don't want to keep the lock, so we clone
|
||||
.map(|plugin| plugin.path.clone()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Source for CombinedSource {
|
||||
fn read(&self, id: &str, ext: &str) -> std::io::Result<FileContent<'_>> {
|
||||
// We could shortcut on fs if we dont check for conflicts
|
||||
let mut entries = self.read_multiple(id, ext);
|
||||
if entries.is_empty() {
|
||||
Err(std::io::ErrorKind::NotFound.into())
|
||||
} else {
|
||||
if entries.len() > 1 {
|
||||
let patha = self.plugin_path(&entries[0].0);
|
||||
let pathb = self.plugin_path(&entries[1].0);
|
||||
tracing::error!("Duplicate asset {id} in {patha:?} and {pathb:?}");
|
||||
}
|
||||
// unconditionally return the first asset found
|
||||
Ok(entries.swap_remove(0).1)
|
||||
}
|
||||
}
|
||||
|
||||
fn read_dir(
|
||||
&self,
|
||||
id: &str,
|
||||
f: &mut dyn FnMut(assets_manager::source::DirEntry),
|
||||
) -> std::io::Result<()> {
|
||||
// TODO: We should combine the sources, but this isn't used in veloren
|
||||
self.fs.raw_source().read_dir(id, f)
|
||||
}
|
||||
|
||||
fn exists(&self, entry: assets_manager::source::DirEntry) -> bool {
|
||||
self.fs.raw_source().exists(entry)
|
||||
|| self
|
||||
.plugin_list
|
||||
.read()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.any(|plugin| plugin.cache.raw_source().exists(entry))
|
||||
}
|
||||
|
||||
// TODO: Enable hot reloading for plugins
|
||||
fn make_source(&self) -> Option<Box<dyn Source + Send>> { self.fs.raw_source().make_source() }
|
||||
|
||||
fn configure_hot_reloading(&self, events: EventSender) -> Result<DynUpdateSender, BoxedError> {
|
||||
self.fs.raw_source().configure_hot_reloading(events)
|
||||
}
|
||||
}
|
||||
|
||||
/// A cache combining filesystem and plugin assets
|
||||
pub struct CombinedCache(AssetCache<CombinedSource>);
|
||||
|
||||
impl CombinedCache {
|
||||
pub fn new() -> std::io::Result<Self> {
|
||||
CombinedSource::new().map(|combined_source| Self(AssetCache::with_source(combined_source)))
|
||||
}
|
||||
|
||||
/// Combine objects from filesystem and plugins
|
||||
pub fn combine<T: Concatenate>(
|
||||
&self,
|
||||
mut load_from: impl FnMut(AnyCache) -> Result<T, BoxedError>,
|
||||
) -> Result<T, BoxedError> {
|
||||
let mut result = load_from(self.0.raw_source().fs.as_any_cache());
|
||||
// Report a severe error from the filesystem asset even if later overwritten by
|
||||
// an Ok value from a plugin
|
||||
if let Err(ref fs_error) = result {
|
||||
match fs_error
|
||||
.source()
|
||||
.and_then(|error_source| error_source.downcast_ref::<std::io::Error>())
|
||||
.map(|io_error| io_error.kind())
|
||||
{
|
||||
Some(std::io::ErrorKind::NotFound) => (),
|
||||
_ => tracing::error!("Filesystem asset load {fs_error:?}"),
|
||||
}
|
||||
}
|
||||
for plugin in self.0.raw_source().plugin_list.read().unwrap().iter() {
|
||||
match load_from(plugin.cache.as_any_cache()) {
|
||||
Ok(b) => {
|
||||
result = if let Ok(a) = result {
|
||||
Ok(a.concatenate(b))
|
||||
} else {
|
||||
Ok(b)
|
||||
};
|
||||
},
|
||||
// Report any error other than NotFound
|
||||
Err(plugin_error) => {
|
||||
match plugin_error
|
||||
.source()
|
||||
.and_then(|error_source| error_source.downcast_ref::<std::io::Error>())
|
||||
.map(|io_error| io_error.kind())
|
||||
{
|
||||
Some(std::io::ErrorKind::NotFound) => (),
|
||||
_ => tracing::error!(
|
||||
"Loading from {:?} failed {plugin_error:?}",
|
||||
plugin.path
|
||||
),
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
/// Add a tar archive (a plugin) to the system.
|
||||
/// All files in that tar file become potential assets.
|
||||
pub fn register_tar(&self, path: PathBuf) -> std::io::Result<()> {
|
||||
let tar_source = Tar::from_path(&path)?;
|
||||
let cache = AssetCache::with_source(tar_source);
|
||||
self.0
|
||||
.raw_source()
|
||||
.plugin_list
|
||||
.write()
|
||||
.unwrap()
|
||||
.push(PluginEntry { path, cache });
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Delegate all cache operations directly to the contained cache object
|
||||
impl std::ops::Deref for CombinedCache {
|
||||
type Target = AssetCache<CombinedSource>;
|
||||
|
||||
fn deref(&self) -> &Self::Target { &self.0 }
|
||||
}
|
191
common/assets/src/tar_source.rs
Normal file
191
common/assets/src/tar_source.rs
Normal file
@ -0,0 +1,191 @@
|
||||
use assets_manager::source::{DirEntry, FileContent, Source};
|
||||
use hashbrown::HashMap;
|
||||
use tar::EntryType;
|
||||
|
||||
use std::{
|
||||
fmt,
|
||||
fs::File,
|
||||
hash, io,
|
||||
os::unix::prelude::FileExt,
|
||||
path::{self, Path, PathBuf},
|
||||
};
|
||||
|
||||
// Derived from the zip source in the assets_manager crate

/// An asset id / extension pair identifying one file inside the archive.
#[derive(Clone, Hash, PartialEq, Eq)]
struct FileDesc(String, String);

/// This hack enables us to use a `(&str, &str)` as a key for an HashMap without
/// allocating a `FileDesc`
trait FileKey {
    fn id(&self) -> &str;
    fn ext(&self) -> &str;
}

impl FileKey for FileDesc {
    fn id(&self) -> &str { self.0.as_str() }

    fn ext(&self) -> &str { self.1.as_str() }
}

impl FileKey for (&'_ str, &'_ str) {
    fn id(&self) -> &str { self.0 }

    fn ext(&self) -> &str { self.1 }
}

impl<'a> std::borrow::Borrow<dyn FileKey + 'a> for FileDesc {
    fn borrow(&self) -> &(dyn FileKey + 'a) { self }
}

impl PartialEq for dyn FileKey + '_ {
    // Equality must agree with the `Hash` impl below for map lookups to work.
    fn eq(&self, other: &Self) -> bool {
        (self.id(), self.ext()) == (other.id(), other.ext())
    }
}

impl Eq for dyn FileKey + '_ {}

impl hash::Hash for dyn FileKey + '_ {
    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
        // Hash exactly the fields that `FileDesc`'s derived `Hash` hashes,
        // in the same order, so both key forms land in the same bucket.
        self.id().hash(hasher);
        self.ext().hash(hasher);
    }
}

impl fmt::Debug for FileDesc {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let FileDesc(id, ext) = self;
        f.debug_struct("FileDesc")
            .field("id", id)
            .field("ext", ext)
            .finish()
    }
}
|
||||
|
||||
impl FileDesc {
|
||||
fn as_dir_entry(&self) -> DirEntry { DirEntry::File(&self.0, &self.1) }
|
||||
}
|
||||
|
||||
/// Register a file of an archive in maps, components in asset ids are separated
|
||||
/// by points
|
||||
fn register_file(
|
||||
path: &Path,
|
||||
position: u64,
|
||||
length: usize,
|
||||
files: &mut HashMap<FileDesc, (u64, usize)>,
|
||||
dirs: &mut HashMap<String, Vec<FileDesc>>,
|
||||
) {
|
||||
// Parse the path and register it.
|
||||
let mut parent_id = String::default();
|
||||
// The closure is used as a cheap `try` block.
|
||||
let unsupported_path = (|| {
|
||||
let parent = path.parent()?;
|
||||
for comp in parent.components() {
|
||||
match comp {
|
||||
path::Component::Normal(s) => {
|
||||
let segment = s.to_str()?;
|
||||
// Reject paths with extensions
|
||||
if segment.contains('.') {
|
||||
return None;
|
||||
}
|
||||
if !parent_id.is_empty() {
|
||||
parent_id.push('.');
|
||||
}
|
||||
parent_id.push_str(segment);
|
||||
},
|
||||
// Reject paths with non-name components
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
|
||||
let file_id = parent_id.clone() + "." + path.file_stem()?.to_str()?;
|
||||
// Register the file in the maps.
|
||||
let ext = path.extension().unwrap_or_default().to_str()?.to_owned();
|
||||
let desc = FileDesc(file_id, ext);
|
||||
files.insert(desc.clone(), (position, length));
|
||||
dirs.entry(parent_id).or_default().push(desc);
|
||||
|
||||
Some(())
|
||||
})()
|
||||
.is_none();
|
||||
if unsupported_path {
|
||||
tracing::error!("Unsupported path in tar archive: {path:?}");
|
||||
}
|
||||
}
|
||||
|
||||
// We avoid the extra dependency of sync_file introduced by Zip here by opening
// the file for each read
/// Random-access reads into the tar file backing this source.
struct Backend(PathBuf);

impl Backend {
    /// Read `len` bytes starting at byte offset `pos` of the backing file.
    ///
    /// Uses seek + `read_exact` instead of the unix-only
    /// `FileExt::read_exact_at` so this also builds on non-unix targets,
    /// and borrows the path instead of cloning the `PathBuf` per read.
    fn read(&self, pos: u64, len: usize) -> std::io::Result<Vec<u8>> {
        let mut file = File::open(&self.0)?;
        std::io::Seek::seek(&mut file, std::io::SeekFrom::Start(pos))?;
        let mut result = vec![0; len];
        std::io::Read::read_exact(&mut file, result.as_mut_slice())?;
        Ok(result)
    }
}
|
||||
|
||||
pub struct Tar {
|
||||
files: HashMap<FileDesc, (u64, usize)>,
|
||||
dirs: HashMap<String, Vec<FileDesc>>,
|
||||
backend: Backend,
|
||||
}
|
||||
|
||||
impl Tar {
|
||||
/// Creates a `Tar` from a file
|
||||
pub fn from_path(path: &Path) -> io::Result<Tar> {
|
||||
let file = File::open(path)?;
|
||||
let mut tar = tar::Archive::new(file);
|
||||
let contents = tar
|
||||
.entries()
|
||||
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
|
||||
let mut files = HashMap::with_capacity(contents.size_hint().0);
|
||||
let mut dirs = HashMap::new();
|
||||
for entry in contents.flatten() {
|
||||
if matches!(entry.header().entry_type(), EntryType::Regular) {
|
||||
register_file(
|
||||
entry.path().map_err(io::Error::other)?.as_ref(),
|
||||
entry.raw_file_position(),
|
||||
entry.size() as usize,
|
||||
&mut files,
|
||||
&mut dirs,
|
||||
);
|
||||
}
|
||||
}
|
||||
Ok(Tar {
|
||||
files,
|
||||
dirs,
|
||||
backend: Backend(path.to_path_buf()),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Source for Tar {
|
||||
fn read(&self, id: &str, ext: &str) -> io::Result<FileContent> {
|
||||
let key: &dyn FileKey = &(id, ext);
|
||||
let id = *self.files.get(key).ok_or(io::ErrorKind::NotFound)?;
|
||||
self.backend.read(id.0, id.1).map(FileContent::Buffer)
|
||||
}
|
||||
|
||||
fn read_dir(&self, id: &str, f: &mut dyn FnMut(DirEntry)) -> io::Result<()> {
|
||||
let dir = self.dirs.get(id).ok_or(io::ErrorKind::NotFound)?;
|
||||
dir.iter().map(FileDesc::as_dir_entry).for_each(f);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn exists(&self, entry: DirEntry) -> bool {
|
||||
match entry {
|
||||
DirEntry::File(id, ext) => self.files.contains_key(&(id, ext) as &dyn FileKey),
|
||||
DirEntry::Directory(id) => self.dirs.contains_key(id),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Tar {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("Tar")
|
||||
.field("files", &self.files)
|
||||
.field("dirs", &self.dirs)
|
||||
.finish()
|
||||
}
|
||||
}
|
@ -196,8 +196,14 @@ impl PluginMgr {
|
||||
.unwrap_or(false)
|
||||
{
|
||||
info!("Loading plugin at {:?}", entry.path());
|
||||
Plugin::from_reader(fs::File::open(entry.path()).map_err(PluginError::Io)?)
|
||||
.map(Some)
|
||||
Plugin::from_reader(fs::File::open(entry.path()).map_err(PluginError::Io)?).map(
|
||||
|plugin| {
|
||||
if let Err(e) = common::assets::register_tar(entry.path()) {
|
||||
error!("Plugin {:?} tar error {e:?}", entry.path());
|
||||
}
|
||||
Some(plugin)
|
||||
},
|
||||
)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
|
@ -27,7 +27,7 @@ singleplayer = ["server"]
|
||||
simd = ["vek/platform_intrinsics"]
|
||||
tracy = ["common-frontend/tracy", "client/tracy"]
|
||||
tracy-memory = ["tracy"] # enables heap profiling with tracy
|
||||
plugins = ["client/plugins"]
|
||||
plugins = ["client/plugins", "common-assets/plugins"]
|
||||
egui-ui = ["voxygen-egui", "egui", "egui_wgpu_backend", "egui_winit_platform"]
|
||||
shaderc-from-source = ["shaderc/build-from-source"]
|
||||
discord = ["discord-sdk"]
|
||||
|
@ -2,7 +2,7 @@ use super::cache::{
|
||||
FigureKey, FigureModelEntryFuture, ModelEntryFuture, TerrainModelEntryFuture, ToolKey,
|
||||
};
|
||||
use common::{
|
||||
assets::{self, AssetExt, AssetHandle, DotVoxAsset, ReloadWatcher, Ron},
|
||||
assets::{self, AssetExt, AssetHandle, Concatenate, DotVoxAsset, MultiRon, ReloadWatcher},
|
||||
comp::{
|
||||
arthropod::{self, BodyType as ABodyType, Species as ASpecies},
|
||||
biped_large::{self, BodyType as BLBodyType, Species as BLSpecies},
|
||||
@ -143,7 +143,7 @@ macro_rules! make_vox_spec {
|
||||
) => {
|
||||
#[derive(Clone)]
|
||||
pub struct $Spec {
|
||||
$( $field: AssetHandle<Ron<$ty>>, )*
|
||||
$( $field: AssetHandle<MultiRon<$ty>>, )*
|
||||
}
|
||||
|
||||
impl assets::Compound for $Spec {
|
||||
@ -178,6 +178,13 @@ macro_rules! make_vox_spec {
|
||||
}
|
||||
}
|
||||
}
|
||||
macro_rules! impl_concatenate_for_wrapper {
|
||||
($name:ty) => {
|
||||
impl Concatenate for $name {
|
||||
fn concatenate(self, b: Self) -> Self { Self(self.0.concatenate(b.0)) }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// All offsets should be relative to an initial origin that doesn't change when
|
||||
// combining segments
|
||||
@ -259,6 +266,10 @@ impl HumColorSpec {
|
||||
}
|
||||
}
|
||||
|
||||
impl Concatenate for HumColorSpec {
|
||||
fn concatenate(self, _b: Self) -> Self { todo!("Can't concatenate HumColorSpec") }
|
||||
}
|
||||
|
||||
// All reliant on humanoid::Species and humanoid::BodyType
|
||||
#[derive(Deserialize)]
|
||||
struct HumHeadSubSpec {
|
||||
@ -364,6 +375,7 @@ impl HumHeadSpec {
|
||||
)
|
||||
}
|
||||
}
|
||||
impl_concatenate_for_wrapper!(HumHeadSpec);
|
||||
|
||||
// Armor aspects should be in the same order, top to bottom.
|
||||
// These seem overly split up, but wanted to keep the armor seperated
|
||||
@ -376,32 +388,53 @@ where
|
||||
default: S,
|
||||
map: HashMap<K, S>,
|
||||
}
|
||||
impl<K: Hash + Eq, S> Concatenate for ArmorVoxSpecMap<K, S> {
|
||||
fn concatenate(self, b: Self) -> Self {
|
||||
Self {
|
||||
default: self.default,
|
||||
map: self.map.concatenate(b.map),
|
||||
}
|
||||
}
|
||||
}
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorShoulderSpec(ArmorVoxSpecMap<String, SidedArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorShoulderSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorChestSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorChestSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorHandSpec(ArmorVoxSpecMap<String, SidedArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorHandSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorBeltSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorBeltSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorBackSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorBackSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorPantsSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorPantsSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorFootSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorFootSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumMainWeaponSpec(HashMap<ToolKey, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumMainWeaponSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumModularComponentSpec(HashMap<String, ModularComponentSpec>);
|
||||
impl_concatenate_for_wrapper!(HumModularComponentSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorLanternSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorLanternSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorGliderSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorGliderSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorHeadSpec(ArmorVoxSpecMap<(Species, BodyType, String), ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorHeadSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct HumArmorTabardSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(HumArmorTabardSpec);
|
||||
|
||||
make_vox_spec!(
|
||||
Body,
|
||||
@ -1072,6 +1105,7 @@ fn mesh_hold() -> BoneMeshes {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct QuadrupedSmallCentralSpec(HashMap<(QSSpecies, QSBodyType), SidedQSCentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(QuadrupedSmallCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedQSCentralVoxSpec {
|
||||
@ -1089,6 +1123,7 @@ struct QuadrupedSmallCentralSubSpec {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct QuadrupedSmallLateralSpec(HashMap<(QSSpecies, QSBodyType), SidedQSLateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(QuadrupedSmallLateralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedQSLateralVoxSpec {
|
||||
@ -1287,6 +1322,7 @@ impl QuadrupedSmallLateralSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct QuadrupedMediumCentralSpec(HashMap<(QMSpecies, QMBodyType), SidedQMCentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(QuadrupedMediumCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedQMCentralVoxSpec {
|
||||
@ -1308,6 +1344,7 @@ struct QuadrupedMediumCentralSubSpec {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct QuadrupedMediumLateralSpec(HashMap<(QMSpecies, QMBodyType), SidedQMLateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(QuadrupedMediumLateralSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct SidedQMLateralVoxSpec {
|
||||
leg_fl: QuadrupedMediumLateralSubSpec,
|
||||
@ -1663,6 +1700,7 @@ impl QuadrupedMediumLateralSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct BirdMediumCentralSpec(HashMap<(BMSpecies, BMBodyType), SidedBMCentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BirdMediumCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedBMCentralVoxSpec {
|
||||
@ -1680,6 +1718,7 @@ struct BirdMediumCentralSubSpec {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct BirdMediumLateralSpec(HashMap<(BMSpecies, BMBodyType), SidedBMLateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BirdMediumLateralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedBMLateralVoxSpec {
|
||||
@ -1917,6 +1956,7 @@ impl BirdMediumLateralSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct TheropodCentralSpec(HashMap<(TSpecies, TBodyType), SidedTCentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(TheropodCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedTCentralVoxSpec {
|
||||
@ -1937,6 +1977,7 @@ struct TheropodCentralSubSpec {
|
||||
}
|
||||
#[derive(Deserialize)]
|
||||
struct TheropodLateralSpec(HashMap<(TSpecies, TBodyType), SidedTLateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(TheropodLateralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedTLateralVoxSpec {
|
||||
@ -2247,6 +2288,7 @@ impl TheropodLateralSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct ArthropodCentralSpec(HashMap<(ASpecies, ABodyType), SidedACentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(ArthropodCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedACentralVoxSpec {
|
||||
@ -2262,6 +2304,7 @@ struct ArthropodCentralSubSpec {
|
||||
}
|
||||
#[derive(Deserialize)]
|
||||
struct ArthropodLateralSpec(HashMap<(ASpecies, ABodyType), SidedALateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(ArthropodLateralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedALateralVoxSpec {
|
||||
@ -2647,6 +2690,7 @@ impl ArthropodLateralSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct FishMediumCentralSpec(HashMap<(FMSpecies, FMBodyType), SidedFMCentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(FishMediumCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedFMCentralVoxSpec {
|
||||
@ -2665,6 +2709,7 @@ struct FishMediumCentralSubSpec {
|
||||
}
|
||||
#[derive(Deserialize)]
|
||||
struct FishMediumLateralSpec(HashMap<(FMSpecies, FMBodyType), SidedFMLateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(FishMediumLateralSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct SidedFMLateralVoxSpec {
|
||||
fin_l: FishMediumLateralSubSpec,
|
||||
@ -2853,6 +2898,7 @@ impl FishMediumLateralSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct FishSmallCentralSpec(HashMap<(FSSpecies, FSBodyType), SidedFSCentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(FishSmallCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedFSCentralVoxSpec {
|
||||
@ -2868,6 +2914,7 @@ struct FishSmallCentralSubSpec {
|
||||
}
|
||||
#[derive(Deserialize)]
|
||||
struct FishSmallLateralSpec(HashMap<(FSSpecies, FSBodyType), SidedFSLateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(FishSmallLateralSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct SidedFSLateralVoxSpec {
|
||||
fin_l: FishSmallLateralSubSpec,
|
||||
@ -2998,18 +3045,25 @@ impl FishSmallLateralSpec {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct BipedSmallWeaponSpec(HashMap<ToolKey, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedSmallWeaponSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct BipedSmallArmorHeadSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedSmallArmorHeadSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct BipedSmallArmorHandSpec(ArmorVoxSpecMap<String, SidedArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedSmallArmorHandSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct BipedSmallArmorFootSpec(ArmorVoxSpecMap<String, SidedArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedSmallArmorFootSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct BipedSmallArmorChestSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedSmallArmorChestSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct BipedSmallArmorPantsSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedSmallArmorPantsSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct BipedSmallArmorTailSpec(ArmorVoxSpecMap<String, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedSmallArmorTailSpec);
|
||||
make_vox_spec!(
|
||||
biped_small::Body,
|
||||
struct BipedSmallSpec {
|
||||
@ -3273,6 +3327,7 @@ impl BipedSmallWeaponSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct DragonCentralSpec(HashMap<(DSpecies, DBodyType), SidedDCentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(DragonCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedDCentralVoxSpec {
|
||||
@ -3294,6 +3349,7 @@ struct DragonCentralSubSpec {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct DragonLateralSpec(HashMap<(DSpecies, DBodyType), SidedDLateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(DragonLateralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedDLateralVoxSpec {
|
||||
@ -3644,6 +3700,7 @@ impl DragonLateralSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct BirdLargeCentralSpec(HashMap<(BLASpecies, BLABodyType), SidedBLACentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BirdLargeCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedBLACentralVoxSpec {
|
||||
@ -3664,6 +3721,7 @@ struct BirdLargeCentralSubSpec {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct BirdLargeLateralSpec(HashMap<(BLASpecies, BLABodyType), SidedBLALateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BirdLargeLateralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedBLALateralVoxSpec {
|
||||
@ -4047,6 +4105,7 @@ impl BirdLargeLateralSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct BipedLargeCentralSpec(HashMap<(BLSpecies, BLBodyType), SidedBLCentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedLargeCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedBLCentralVoxSpec {
|
||||
@ -4066,6 +4125,7 @@ struct BipedLargeCentralSubSpec {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct BipedLargeLateralSpec(HashMap<(BLSpecies, BLBodyType), SidedBLLateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedLargeLateralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedBLLateralVoxSpec {
|
||||
@ -4087,8 +4147,10 @@ struct BipedLargeLateralSubSpec {
|
||||
}
|
||||
#[derive(Deserialize)]
|
||||
struct BipedLargeMainSpec(HashMap<ToolKey, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedLargeMainSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct BipedLargeSecondSpec(HashMap<ToolKey, ArmorVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(BipedLargeSecondSpec);
|
||||
make_vox_spec!(
|
||||
biped_large::Body,
|
||||
struct BipedLargeSpec {
|
||||
@ -4466,6 +4528,7 @@ impl BipedLargeSecondSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct GolemCentralSpec(HashMap<(GSpecies, GBodyType), SidedGCentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(GolemCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedGCentralVoxSpec {
|
||||
@ -4484,6 +4547,7 @@ struct GolemCentralSubSpec {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct GolemLateralSpec(HashMap<(GSpecies, GBodyType), SidedGLateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(GolemLateralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedGLateralVoxSpec {
|
||||
@ -4776,6 +4840,7 @@ impl GolemLateralSpec {
|
||||
//////
|
||||
#[derive(Deserialize)]
|
||||
struct QuadrupedLowCentralSpec(HashMap<(QLSpecies, QLBodyType), SidedQLCentralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(QuadrupedLowCentralSpec);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SidedQLCentralVoxSpec {
|
||||
@ -4796,6 +4861,7 @@ struct QuadrupedLowCentralSubSpec {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct QuadrupedLowLateralSpec(HashMap<(QLSpecies, QLBodyType), SidedQLLateralVoxSpec>);
|
||||
impl_concatenate_for_wrapper!(QuadrupedLowLateralSpec);
|
||||
#[derive(Deserialize)]
|
||||
struct SidedQLLateralVoxSpec {
|
||||
front_left: QuadrupedLowLateralSubSpec,
|
||||
@ -5125,6 +5191,7 @@ impl ObjectCentralSpec {
|
||||
(central, Vec3::from(spec.bone1.offset))
|
||||
}
|
||||
}
|
||||
impl_concatenate_for_wrapper!(ObjectCentralSpec);
|
||||
|
||||
struct ModelWithOptionalIndex(String, u32);
|
||||
|
||||
@ -5162,6 +5229,7 @@ impl<'de> Deserialize<'de> for ModelWithOptionalIndex {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct ItemDropCentralSpec(HashMap<ItemKey, ModelWithOptionalIndex>);
|
||||
impl_concatenate_for_wrapper!(ItemDropCentralSpec);
|
||||
|
||||
make_vox_spec!(
|
||||
item_drop::Body,
|
||||
|
Loading…
Reference in New Issue
Block a user