diff --git a/Cargo.lock b/Cargo.lock
index 5d10af247c..3f61e21c5e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -316,9 +316,9 @@ dependencies = [
 
 [[package]]
 name = "assets_manager"
-version = "0.10.2"
+version = "0.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f00425f4c1f3349b35daf0a73477249f6574fe89f4b9d76aca0b2a1356886b3b"
+checksum = "3725d3fcae7767855256e1f627df67ac8ca675c55fde1bf784d5669d1ddc7261"
 dependencies = [
  "ab_glyph",
  "ahash 0.8.7",
@@ -326,10 +326,11 @@ dependencies = [
  "crossbeam-channel",
  "log",
  "notify",
- "parking_lot 0.12.1",
  "ron 0.8.1",
  "serde",
  "serde_json",
+ "sync_file",
+ "tar",
 ]
 
 [[package]]
@@ -6208,6 +6209,15 @@ dependencies = [
  "unicode-ident",
 ]
 
+[[package]]
+name = "sync_file"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a49b72df45d9c2d5fbb13b02c97437a3175d8c9860297297597d3ed715e0f046"
+dependencies = [
+ "wasi 0.11.0+wasi-snapshot-preview1",
+]
+
 [[package]]
 name = "sync_wrapper"
 version = "0.1.2"
@@ -7008,7 +7018,6 @@ dependencies = [
  "lazy_static",
  "ron 0.8.1",
  "serde",
- "tar",
 "tracing",
 "walkdir",
 "wavefront",
diff --git a/client/i18n/src/lib.rs b/client/i18n/src/lib.rs
index 774c94df00..74021a0ff8 100644
--- a/client/i18n/src/lib.rs
+++ b/client/i18n/src/lib.rs
@@ -16,7 +16,9 @@ use hashbrown::HashMap;
 use serde::{Deserialize, Serialize};
 use std::{borrow::Cow, io};
 
-use assets::{source::DirEntry, AssetExt, AssetGuard, AssetHandle, ReloadWatcher, SharedString};
+use assets::{
+    source::DirEntry, AssetExt, AssetHandle, AssetReadGuard, ReloadWatcher, SharedString,
+};
 use common_assets as assets;
 use common_i18n::{Content, LocalizationArg};
 use tracing::warn;
@@ -167,7 +169,7 @@ impl assets::Compound for Language {
         let mut bundle = FluentBundle::new_concurrent(vec![lang_id]);
 
         // Here go dragons
-        for id in cache.load_dir::<raw::Resource>(path, true)?.ids() {
+        for id in cache.load_rec_dir::<raw::Resource>(path)?.read().ids() {
             match cache.load(id) {
                 Ok(handle) => {
                     let source: &raw::Resource = &handle.read();
@@ -229,8 +231,8 @@ pub type Localization = LocalizationGuard;
 /// RAII guard returned from [`LocalizationHandle::read()`], resembles
 /// [`AssetGuard`]
 pub struct LocalizationGuard {
-    active: AssetGuard<Language>,
-    fallback: Option<AssetGuard<Language>>,
+    active: AssetReadGuard<Language>,
+    fallback: Option<AssetReadGuard<Language>>,
 }
 
 impl LocalizationGuard {
@@ -592,8 +594,9 @@ struct LocalizationList(Vec<LanguageMetadata>);
 impl assets::Compound for LocalizationList {
     fn load(cache: assets::AnyCache, specifier: &SharedString) -> Result<Self, BoxedError> {
         // List language directories
-        let languages = assets::load_dir::<raw::Manifest>(specifier, false)
+        let languages = assets::load_rec_dir::<raw::Manifest>(specifier)
             .unwrap_or_else(|e| panic!("Failed to get manifests from {}: {:?}", specifier, e))
+            .read()
             .ids()
             .filter_map(|spec| cache.load::<raw::Manifest>(spec).ok())
             .map(|localization| localization.read().metadata.clone())
diff --git a/common/assets/Cargo.toml b/common/assets/Cargo.toml
index d4a604ec0b..eb1069b4bf 100644
--- a/common/assets/Cargo.toml
+++ b/common/assets/Cargo.toml
@@ -7,13 +7,12 @@ version = "0.10.0"
 
 [dependencies]
 lazy_static = { workspace = true }
-assets_manager = {version = "0.10", features = ["bincode", "ron", "json"]}
+assets_manager = { version = "0.11.1", features = ["bincode", "ron", "json"] }
 ron = { workspace = true }
 dot_vox = "5.1"
 wavefront = "0.2" # TODO: Use vertex-colors branch when we have models that have them
 image = { workspace = true }
 tracing = { workspace = true }
-tar = { version = "0.4.37", optional = true }
 hashbrown = { workspace = true }
 
 # asset tweak
@@ -25,4 +24,4 @@ walkdir = "2.3.2"
 [features]
 hot-reloading = ["assets_manager/hot-reloading"]
 asset_tweak = ["dep:serde", "hot-reloading"]
-plugins = ["dep:serde", "dep:tar"]
+plugins = ["dep:serde", "assets_manager/tar"]
diff --git a/common/assets/src/fs.rs b/common/assets/src/fs.rs
index cbb05871f8..26085937e4 100644
--- a/common/assets/src/fs.rs
+++ b/common/assets/src/fs.rs
@@ -1,7 +1,7 @@
 use std::{fs, io};
 
 use assets_manager::{
-    hot_reloading::{DynUpdateSender, EventSender, FsWatcherBuilder},
+    hot_reloading::{EventSender, FsWatcherBuilder},
     source::{DirEntry, FileContent, FileSystem as RawFs, Source},
     BoxedError,
 };
@@ -94,7 +94,7 @@ impl Source for FileSystem {
 
     fn make_source(&self) -> Option<Box<dyn Source + Send>> { Some(Box::new(self.clone())) }
 
-    fn configure_hot_reloading(&self, events: EventSender) -> Result<DynUpdateSender, BoxedError> {
+    fn configure_hot_reloading(&self, events: EventSender) -> Result<(), BoxedError> {
         let mut builder = FsWatcherBuilder::new()?;
 
         if let Some(dir) = &self.override_dir {
@@ -102,6 +102,7 @@ impl Source for FileSystem {
         }
 
         builder.watch(self.default.root().to_owned())?;
-        Ok(builder.build(events))
+        builder.build(events);
+        Ok(())
     }
 }
diff --git a/common/assets/src/lib.rs b/common/assets/src/lib.rs
index f6368a20c9..8d25e7a650 100644
--- a/common/assets/src/lib.rs
+++ b/common/assets/src/lib.rs
@@ -25,7 +25,6 @@ pub use assets_manager::{
 
 mod fs;
 #[cfg(feature = "plugins")] mod plugin_cache;
-#[cfg(feature = "plugins")] mod tar_source;
 mod walk;
 pub use walk::{walk_tree, Walk};
 
@@ -48,9 +47,9 @@ pub fn start_hot_reloading() { ASSETS.enhance_hot_reloading(); }
 
 #[cfg(feature = "plugins")]
 pub fn register_tar(path: PathBuf) -> std::io::Result<()> { ASSETS.register_tar(path) }
-pub type AssetHandle<T> = assets_manager::Handle<'static, T>;
-pub type AssetGuard<T> = assets_manager::AssetGuard<'static, T>;
-pub type AssetDirHandle<T> = assets_manager::DirHandle<'static, T>;
+pub type AssetHandle<T> = &'static assets_manager::Handle<T>;
+pub type AssetReadGuard<T> = assets_manager::AssetReadGuard<'static, T>;
+pub type AssetDirHandle<T> = AssetHandle<assets_manager::RecursiveDirectory<T>>;
 pub type ReloadWatcher = assets_manager::ReloadWatcher<'static>;
 
 /// The Asset trait, which is implemented by all structures that have their data
@@ -71,7 +70,7 @@ pub trait AssetExt: Sized + Send + Sync + 'static {
     where
         Self: Clone,
     {
-        Self::load(specifier).map(AssetHandle::cloned)
+        Self::load(specifier).map(|h| h.cloned())
     }
 
     fn load_or_insert_with(
@@ -143,7 +142,7 @@ pub trait CacheCombined<'a> {
     fn load_and_combine<A: Compound + Concatenate>(
         self,
         id: &str,
-    ) -> Result<assets_manager::Handle<'a, A>, BoxedError>;
+    ) -> Result<&'a assets_manager::Handle<A>, BoxedError>;
 }
 
 /// Loads directory and all files in it
@@ -154,39 +153,9 @@ pub trait CacheCombined<'a> {
 ///
 /// When loading a directory recursively, directories that can't be read are
 /// ignored.
-pub fn load_dir<T: DirLoadable>(
-    specifier: &str,
-    recursive: bool,
-) -> Result<AssetDirHandle<T>, Error> {
+pub fn load_rec_dir<T: DirLoadable>(specifier: &str) -> Result<AssetDirHandle<T>, Error> {
     let specifier = specifier.strip_suffix(".*").unwrap_or(specifier);
-    ASSETS.load_dir(specifier, recursive)
-}
-
-/// Loads directory and all files in it
-///
-/// # Panics
-/// 1) If can't load directory (filesystem errors)
-/// 2) If file can't be loaded (parsing problem)
-#[track_caller]
-pub fn read_expect_dir<T: DirLoadable>(
-    specifier: &str,
-    recursive: bool,
-) -> impl Iterator<Item = AssetGuard<T>> {
-    #[track_caller]
-    #[cold]
-    fn expect_failed(err: Error) -> ! {
-        panic!(
-            "Failed loading directory: {} (error={:?})",
-            err.id(),
-            err.reason()
-        )
-    }
-
-    // Avoid using `unwrap_or_else` to avoid breaking `#[track_caller]`
-    match load_dir::<T>(specifier, recursive) {
-        Ok(dir) => dir.ids().map(|entry| T::load_expect(entry).read()),
-        Err(err) => expect_failed(err),
-    }
+    ASSETS.load_rec_dir(specifier)
 }
 
 impl<T: Compound> AssetExt for T {
@@ -203,7 +172,7 @@ impl<'a> CacheCombined<'a> for AnyCache<'a> {
     fn load_and_combine<A: Compound + Concatenate>(
         self,
         specifier: &str,
-    ) -> Result<assets_manager::Handle<'a, A>, BoxedError> {
+    ) -> Result<&'a assets_manager::Handle<A>, BoxedError> {
         #[cfg(feature = "plugins")]
         {
             self.get_cached(specifier).map_or_else(
diff --git a/common/assets/src/plugin_cache.rs b/common/assets/src/plugin_cache.rs
index 0c4ee9eb01..8603d725c5 100644
--- a/common/assets/src/plugin_cache.rs
+++ b/common/assets/src/plugin_cache.rs
@@ -2,10 +2,10 @@ use std::{path::PathBuf, sync::RwLock};
 
 use crate::Concatenate;
 
-use super::{fs::FileSystem, tar_source::Tar, ASSETS_PATH};
+use super::{fs::FileSystem, ASSETS_PATH};
 use assets_manager::{
-    hot_reloading::{DynUpdateSender, EventSender},
-    source::{FileContent, Source},
+    hot_reloading::EventSender,
+    source::{FileContent, Source, Tar},
     AnyCache, AssetCache, BoxedError,
 };
 
@@ -118,7 +118,7 @@ impl Source for CombinedSource {
     // TODO: Enable hot reloading for plugins
     fn make_source(&self) -> Option<Box<dyn Source + Send>> { self.fs.raw_source().make_source() }
 
-    fn configure_hot_reloading(&self, events: EventSender) -> Result<DynUpdateSender, BoxedError> {
+    fn configure_hot_reloading(&self, events: EventSender) -> Result<(), BoxedError> {
         self.fs.raw_source().configure_hot_reloading(events)
     }
 }
@@ -180,7 +180,7 @@ impl CombinedCache {
     /// Add a tar archive (a plugin) to the system.
     /// All files in that tar file become potential assets.
     pub fn register_tar(&self, path: PathBuf) -> std::io::Result<()> {
-        let tar_source = Tar::from_path(&path)?;
+        let tar_source = Tar::open(&path)?;
         let cache = AssetCache::with_source(tar_source);
         self.0
             .raw_source()
diff --git a/common/assets/src/tar_source.rs b/common/assets/src/tar_source.rs
deleted file mode 100644
index 316cdce63c..0000000000
--- a/common/assets/src/tar_source.rs
+++ /dev/null
@@ -1,159 +0,0 @@
-use assets_manager::source::{DirEntry, FileContent, Source};
-use hashbrown::HashMap;
-use tar::EntryType;
-
-use std::{
-    fmt,
-    fs::File,
-    io::{self, Read, Seek, SeekFrom},
-    path::{self, Path, PathBuf},
-};
-
-// Derived from the zip source in the assets_manager crate
-
-#[derive(Clone, Hash, PartialEq, Eq)]
-struct FileDesc(String, String);
-
-impl hashbrown::Equivalent<FileDesc> for (&str, &str) {
-    fn equivalent(&self, key: &FileDesc) -> bool { self.0 == key.0 && self.1 == key.1 }
-}
-
-impl fmt::Debug for FileDesc {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.debug_struct("FileDesc")
-            .field("id", &self.0)
-            .field("ext", &self.1)
-            .finish()
-    }
-}
-
-impl FileDesc {
-    fn as_dir_entry(&self) -> DirEntry { DirEntry::File(&self.0, &self.1) }
-}
-
-/// Register a file of an archive in maps, components in asset ids are separated
-/// by points
-fn register_file(
-    path: &Path,
-    position: u64,
-    length: usize,
-    files: &mut HashMap<FileDesc, (u64, usize)>,
-    dirs: &mut HashMap<String, Vec<FileDesc>>,
-) {
-    // Parse the path and register it.
-    let mut parent_id = String::default();
-    // The closure is used as a cheap `try` block.
-    let unsupported_path = (|| {
-        let parent = path.parent()?;
-        for comp in parent.components() {
-            match comp {
-                path::Component::Normal(s) => {
-                    let segment = s.to_str()?;
-                    // Reject paths with extensions
-                    if segment.contains('.') {
-                        return None;
-                    }
-                    if !parent_id.is_empty() {
-                        parent_id.push('.');
-                    }
-                    parent_id.push_str(segment);
-                },
-                // Reject paths with non-name components
-                _ => return None,
-            }
-        }
-
-        let file_id = parent_id.clone() + "." + path.file_stem()?.to_str()?;
-        // Register the file in the maps.
-        let ext = path.extension().unwrap_or_default().to_str()?.to_owned();
-        let desc = FileDesc(file_id, ext);
-        files.insert(desc.clone(), (position, length));
-        dirs.entry(parent_id).or_default().push(desc);
-
-        Some(())
-    })()
-    .is_none();
-    if unsupported_path {
-        tracing::error!("Unsupported path in tar archive: {path:?}");
-    }
-}
-
-// We avoid the extra dependency of sync_file introduced by Zip here by opening
-// the file for each read
-struct Backend(PathBuf);
-
-impl Backend {
-    fn read(&self, pos: u64, len: usize) -> std::io::Result<Vec<u8>> {
-        File::open(self.0.clone()).and_then(|mut file| {
-            file.seek(SeekFrom::Start(pos)).and_then(|_| {
-                let mut result = vec![0; len];
-                file.read_exact(result.as_mut_slice())
-                    .map(move |_num_bytes| result)
-            })
-        })
-    }
-}
-
-pub struct Tar {
-    files: HashMap<FileDesc, (u64, usize)>,
-    dirs: HashMap<String, Vec<FileDesc>>,
-    backend: Backend,
-}
-
-impl Tar {
-    /// Creates a `Tar` from a file
-    pub fn from_path(path: &Path) -> io::Result<Tar> {
-        let file = File::open(path)?;
-        let mut tar = tar::Archive::new(file);
-        let contents = tar
-            .entries()
-            .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
-        let mut files = HashMap::with_capacity(contents.size_hint().0);
-        let mut dirs = HashMap::new();
-        for entry in contents.flatten() {
-            if matches!(entry.header().entry_type(), EntryType::Regular) {
-                register_file(
-                    entry.path().map_err(io::Error::other)?.as_ref(),
-                    entry.raw_file_position(),
-                    entry.size() as usize,
-                    &mut files,
-                    &mut dirs,
-                );
-            }
-        }
-        Ok(Tar {
-            files,
-            dirs,
-            backend: Backend(path.to_path_buf()),
-        })
-    }
-}
-
-impl Source for Tar {
-    fn read(&self, id: &str, ext: &str) -> io::Result<FileContent> {
-        let id = *self.files.get(&(id, ext)).ok_or(io::ErrorKind::NotFound)?;
-        self.backend.read(id.0, id.1).map(FileContent::Buffer)
-    }
-
-    fn read_dir(&self, id: &str, f: &mut dyn FnMut(DirEntry)) -> io::Result<()> {
-        let dir = self.dirs.get(id).ok_or(io::ErrorKind::NotFound)?;
-        dir.iter().map(FileDesc::as_dir_entry).for_each(f);
-        Ok(())
-    }
-
-    fn exists(&self, entry: DirEntry) -> bool {
-        match entry {
-            DirEntry::File(id, ext) => self.files.contains_key(&(id, ext)),
-            DirEntry::Directory(id) => self.dirs.contains_key(id),
-        }
-    }
-}
-
-impl fmt::Debug for Tar {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("Tar")
-            .field("files", &self.files)
-            .field("dirs", &self.dirs)
-            .finish()
-    }
-}
diff --git a/common/src/bin/csv_export/main.rs b/common/src/bin/csv_export/main.rs
index dabe050436..2bca80ff20 100644
--- a/common/src/bin/csv_export/main.rs
+++ b/common/src/bin/csv_export/main.rs
@@ -489,10 +489,10 @@ fn entity_drops(entity_config: &str) -> Result<(), Box<dyn Error>> {
     }
 
     if entity_config.eq_ignore_ascii_case("all") {
-        let configs = assets::load_dir::<EntityConfig>("common.entity", true)
+        let configs = assets::load_rec_dir::<EntityConfig>("common.entity")
             .expect("Entity files moved somewhere else maybe?")
-            .ids();
-        for config in configs {
+            .read();
+        for config in configs.ids() {
             write_entity_loot(&mut wtr, config)?;
         }
     } else {
diff --git a/common/src/comp/inventory/item/mod.rs b/common/src/comp/inventory/item/mod.rs
index 88108cbd53..72f7b5db5f 100644
--- a/common/src/comp/inventory/item/mod.rs
+++ b/common/src/comp/inventory/item/mod.rs
@@ -975,8 +975,11 @@ impl Item {
     /// asset glob pattern
     pub fn new_from_asset_glob(asset_glob: &str) -> Result<Vec<Self>, Error> {
         let specifier = asset_glob.strip_suffix(".*").unwrap_or(asset_glob);
-        let defs = assets::load_dir::<RawItemDef>(specifier, true)?;
-        defs.ids().map(|id| Item::new_from_asset(id)).collect()
+        let defs = assets::load_rec_dir::<RawItemDef>(specifier)?;
+        defs.read()
+            .ids()
+            .map(|id| Item::new_from_asset(id))
+            .collect()
     }
 
     /// Creates a new instance of an `Item from the provided asset identifier if
@@ -1632,18 +1635,19 @@ pub fn all_item_defs_expect() -> Vec<String> {
 
 /// Returns all item asset specifiers
 pub fn try_all_item_defs() -> Result<Vec<String>, Error> {
-    let defs = assets::load_dir::<RawItemDef>("common.items", true)?;
-    Ok(defs.ids().map(|id| id.to_string()).collect())
+    let defs = assets::load_rec_dir::<RawItemDef>("common.items")?;
+    Ok(defs.read().ids().map(|id| id.to_string()).collect())
 }
 
 /// Designed to return all possible items, including modulars.
 /// And some impossible too, like ItemKind::TagExamples.
 pub fn all_items_expect() -> Vec<Item> {
-    let defs = assets::load_dir::<RawItemDef>("common.items", true)
+    let defs = assets::load_rec_dir::<RawItemDef>("common.items")
         .expect("failed to load item asset directory");
 
     // Grab all items from assets
     let mut asset_items: Vec<Item> = defs
+        .read()
         .ids()
         .map(|id| Item::new_from_asset_expect(id))
         .collect();
diff --git a/common/src/comp/inventory/loadout_builder.rs b/common/src/comp/inventory/loadout_builder.rs
index 6d29df89b9..cea9ca227a 100644
--- a/common/src/comp/inventory/loadout_builder.rs
+++ b/common/src/comp/inventory/loadout_builder.rs
@@ -1437,9 +1437,9 @@ mod tests {
     // One for asset itself and second if it serves as a base for other asset.
     #[test]
     fn validate_all_loadout_assets() {
-        let loadouts = assets::load_dir::<LoadoutSpec>("common.loadout", true)
+        let loadouts = assets::load_rec_dir::<LoadoutSpec>("common.loadout")
             .expect("failed to load loadout directory");
-        for loadout_id in loadouts.ids() {
+        for loadout_id in loadouts.read().ids() {
             let loadout =
                 LoadoutSpec::load_cloned(loadout_id).expect("failed to load loadout asset");
             loadout
@@ -1451,10 +1451,10 @@ mod tests {
 
     // Basically test that our validation tests don't have false-positives
     #[test]
     fn test_valid_assets() {
-        let loadouts = assets::load_dir::<LoadoutSpec>("test.loadout.ok", true)
+        let loadouts = assets::load_rec_dir::<LoadoutSpec>("test.loadout.ok")
             .expect("failed to load loadout directory");
-        for loadout_id in loadouts.ids() {
+        for loadout_id in loadouts.read().ids() {
             let loadout =
                 LoadoutSpec::load_cloned(loadout_id).expect("failed to load loadout asset");
             loadout
diff --git a/common/src/comp/inventory/trade_pricing.rs b/common/src/comp/inventory/trade_pricing.rs
index cf6dc1f71e..2f9e1d667a 100644
--- a/common/src/comp/inventory/trade_pricing.rs
+++ b/common/src/comp/inventory/trade_pricing.rs
@@ -12,7 +12,7 @@ use crate::{
     recipe::{default_component_recipe_book, default_recipe_book, RecipeInput},
     trade::Good,
 };
-use assets::AssetGuard;
+use assets::AssetReadGuard;
 use hashbrown::HashMap;
 use lazy_static::lazy_static;
 use serde::Deserialize;
@@ -528,7 +528,7 @@ struct RememberedRecipe {
     input: Vec<(ItemDefinitionIdOwned, u32)>,
 }
 
-fn get_scaling(contents: &AssetGuard<TradingPriceFile>, good: Good) -> f32 {
+fn get_scaling(contents: &AssetReadGuard<TradingPriceFile>, good: Good) -> f32 {
     contents
         .good_scaling
         .iter()
diff --git a/common/src/generation.rs b/common/src/generation.rs
index 2f50b2ceb8..baf01b97f6 100644
--- a/common/src/generation.rs
+++ b/common/src/generation.rs
@@ -165,8 +165,8 @@ impl EntityConfig {
 
 /// Return all entity config specifiers
 pub fn try_all_entity_configs() -> Result<Vec<String>, Error> {
-    let configs = assets::load_dir::<EntityConfig>("common.entity", true)?;
-    Ok(configs.ids().map(|id| id.to_string()).collect())
+    let configs = assets::load_rec_dir::<EntityConfig>("common.entity")?;
+    Ok(configs.read().ids().map(|id| id.to_string()).collect())
 }
 
 #[derive(Clone)]
diff --git a/common/src/lottery.rs b/common/src/lottery.rs
index 63ddc9c95d..d7e1098fcb 100644
--- a/common/src/lottery.rs
+++ b/common/src/lottery.rs
@@ -370,6 +370,7 @@ impl<T> Default for LootSpec<T> {
 pub mod tests {
     use super::*;
     use crate::{assets, comp::Item};
+    use assets::AssetExt;
 
     #[cfg(test)]
     pub fn validate_loot_spec(item: &LootSpec<String>) {
@@ -379,8 +380,8 @@ pub mod tests {
                 Item::new_from_asset_expect(item);
             },
             LootSpec::LootTable(loot_table) => {
-                let loot_table = Lottery::<LootSpec<String>>::load_expect_cloned(loot_table);
-                validate_table_contents(loot_table);
+                let loot_table = Lottery::<LootSpec<String>>::load_expect(loot_table).read();
+                validate_table_contents(&loot_table);
             },
             LootSpec::Nothing => {},
             LootSpec::ModularWeapon {
@@ -428,7 +429,7 @@ pub mod tests {
         }
     }
 
-    fn validate_table_contents(table: Lottery<LootSpec<String>>) {
+    fn validate_table_contents(table: &Lottery<LootSpec<String>>) {
         for (_, item) in table.iter() {
             validate_loot_spec(item);
         }
@@ -436,10 +437,11 @@ pub mod tests {
     }
 
     #[test]
     fn test_loot_tables() {
-        let loot_tables =
-            assets::read_expect_dir::<Lottery<LootSpec<String>>>("common.loot_tables", true);
-        for loot_table in loot_tables {
-            validate_table_contents(loot_table.clone());
+        let loot_tables = assets::load_rec_dir::<Lottery<LootSpec<String>>>("common.loot_tables")
+            .expect("load loot_tables");
+        for loot_table in loot_tables.read().ids() {
+            let loot_table = Lottery::<LootSpec<String>>::load_expect(loot_table);
+            validate_table_contents(&loot_table.read());
         }
     }
diff --git a/common/src/skillset_builder.rs b/common/src/skillset_builder.rs
index d330f2c834..114f00f33c 100644
--- a/common/src/skillset_builder.rs
+++ b/common/src/skillset_builder.rs
@@ -160,8 +160,11 @@ mod tests {
 
     #[test]
     fn test_all_skillset_assets() {
-        let skillsets = assets::read_expect_dir::<SkillSetTree>("common.skillset", true);
-        for skillset in skillsets {
+        let skillsets =
+            assets::load_rec_dir::<SkillSetTree>("common.skillset").expect("load skillsets");
+        for skillset in skillsets.read().ids() {
+            let skillset = SkillSetTree::load_expect(skillset).read();
+
             drop({
                 let mut skillset_builder = SkillSetBuilder::default();
                 let nodes = &*skillset.0;
diff --git a/voxygen/Cargo.toml b/voxygen/Cargo.toml
index a1137cb1df..9463e9d783 100644
--- a/voxygen/Cargo.toml
+++ b/voxygen/Cargo.toml
@@ -98,7 +98,7 @@ server = { package = "veloren-server", path = "../server", optional = true, defa
 clap = { workspace = true }
 
 # Utility
-assets_manager = {version = "0.10", features = ["ab_glyph"]}
+assets_manager = {version = "0.11", features = ["ab_glyph"]}
 backtrace = "0.3.40"
 chrono = { workspace = true }
 chumsky = "0.9"
diff --git a/world/src/layer/shrub.rs b/world/src/layer/shrub.rs
index d37ba37e49..e823281a7a 100644
--- a/world/src/layer/shrub.rs
+++ b/world/src/layer/shrub.rs
@@ -74,10 +74,10 @@ pub fn apply_shrubs_to(canvas: &mut Canvas, _dynamic_rng: &mut impl Rng) {
             let units = UnitChooser::new(shrub.seed).get(shrub.seed).into();
 
             let shrubs = match shrub.kind {
-                ForestKind::Mangrove => &*JUNGLE_SHRUBS,
-                ForestKind::Acacia | ForestKind::Baobab => &*SAVANNAH_SHRUBS,
-                ForestKind::Oak | ForestKind::Chestnut => &*TEMPERATE_SHRUBS,
-                ForestKind::Pine => &*TAIGA_SHRUBS,
+                ForestKind::Mangrove => *JUNGLE_SHRUBS,
+                ForestKind::Acacia | ForestKind::Baobab => *SAVANNAH_SHRUBS,
+                ForestKind::Oak | ForestKind::Chestnut => *TEMPERATE_SHRUBS,
+                ForestKind::Pine => *TAIGA_SHRUBS,
                 _ => continue, // TODO: Add more shrub varieties
             }
             .read();
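The call sites above all follow the same migration: `assets::load_dir::<T>(specifier, recursive)` from assets_manager 0.10 becomes `assets::load_rec_dir::<T>(specifier)`, and the returned handle must be `.read()` before iterating its `ids()`. A minimal sketch of the new call shape, modelled on `try_all_entity_configs` in `common/src/generation.rs`; the `EntityConfig` import path here is an assumption, not taken from the patch:

```rust
// Sketch only: the assets_manager 0.11 recursive-directory load used throughout this diff.
use common_assets as assets;
// Assumed import path for the asset type defined in common/src/generation.rs.
use veloren_common::generation::EntityConfig;

fn all_entity_config_ids() -> Result<Vec<String>, assets::Error> {
    // 0.10: assets::load_dir::<EntityConfig>("common.entity", true)?.ids()
    // 0.11: the handle wraps a recursive directory listing and must be read first.
    let configs = assets::load_rec_dir::<EntityConfig>("common.entity")?;
    Ok(configs.read().ids().map(|id| id.to_string()).collect())
}
```

The same shape applies to the loadout, skillset, loot-table, and item-definition call sites changed in this patch.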