diff --git a/VirtualFileSystem.Test/App.config b/VirtualFileSystem.Test/App.config
new file mode 100644
index 00000000..56efbc7b
--- /dev/null
+++ b/VirtualFileSystem.Test/App.config
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<configuration>
+ <startup>
+ <supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.7.2" />
+ </startup>
+</configuration>
\ No newline at end of file
diff --git a/VirtualFileSystem.Test/Program.cs b/VirtualFileSystem.Test/Program.cs
new file mode 100644
index 00000000..c36cfbca
--- /dev/null
+++ b/VirtualFileSystem.Test/Program.cs
@@ -0,0 +1,20 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Wabbajack.Common;
+
+namespace VirtualFileSystem.Test
+{
+ class Program
+ {
+ static void Main(string[] args)
+ {
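+ // Initialize the work queue with no-op progress/status callbacks, then index an MO2 install folder.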
+ WorkQueue.Init((a, b, c) => { return; },
+ (a, b) => { return; });
+ var vfs = new VirtualFileSystem();
+ vfs.AddRoot(@"D:\MO2 Instances\Mod Organizer 2", s => Console.WriteLine(s));
+ }
+ }
+}
diff --git a/VirtualFileSystem.Test/Properties/AssemblyInfo.cs b/VirtualFileSystem.Test/Properties/AssemblyInfo.cs
new file mode 100644
index 00000000..7746488b
--- /dev/null
+++ b/VirtualFileSystem.Test/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("VirtualFileSystem.Test")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("VirtualFileSystem.Test")]
+[assembly: AssemblyCopyright("Copyright © 2019")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("a2913dfe-18ff-468b-a6c1-55f7c0cc0ce8")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/VirtualFileSystem.Test/VirtualFileSystem.Test.csproj b/VirtualFileSystem.Test/VirtualFileSystem.Test.csproj
new file mode 100644
index 00000000..218b1aae
--- /dev/null
+++ b/VirtualFileSystem.Test/VirtualFileSystem.Test.csproj
@@ -0,0 +1,63 @@
+
+
+
+
+ Debug
+ AnyCPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}
+ Exe
+ VirtualFileSystem.Test
+ VirtualFileSystem.Test
+ v4.7.2
+ 512
+ true
+ true
+
+
+ x64
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+
+
+ AnyCPU
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {5128b489-bc28-4f66-9f0b-b4565af36cbc}
+ VirtualFileSystem
+
+
+ {B3F3FB6E-B9EB-4F49-9875-D78578BC7AE5}
+ Wabbajack.Common
+
+
+
+
\ No newline at end of file
diff --git a/VirtualFileSystem/Properties/AssemblyInfo.cs b/VirtualFileSystem/Properties/AssemblyInfo.cs
new file mode 100644
index 00000000..3693b24e
--- /dev/null
+++ b/VirtualFileSystem/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("VirtualFileSystem")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("VirtualFileSystem")]
+[assembly: AssemblyCopyright("Copyright © 2019")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("5128b489-bc28-4f66-9f0b-b4565af36cbc")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/VirtualFileSystem/VirtualFileSystem.cs b/VirtualFileSystem/VirtualFileSystem.cs
new file mode 100644
index 00000000..4689da27
--- /dev/null
+++ b/VirtualFileSystem/VirtualFileSystem.cs
@@ -0,0 +1,372 @@
+using Compression.BSA;
+using ICSharpCode.SharpZipLib.Zip;
+using Newtonsoft.Json;
+using SevenZipExtractor;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using System.Text;
+using System.Threading.Tasks;
+using Wabbajack.Common;
+
+namespace VirtualFileSystem
+{
+ public class VirtualFileSystem
+ {
+ private Dictionary<string, VirtualFile> _files = new Dictionary<string, VirtualFile>();
+ internal string _stagedRoot;
+
+ public VirtualFileSystem()
+ {
+ _stagedRoot = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "vfs_staged_files");
+ Directory.CreateDirectory(_stagedRoot);
+ }
+
+ /// <summary>
+ /// Adds the root path to the filesystem. This may take quite some time as every file in the folder will be hashed,
+ /// and every archive examined.
+ /// </summary>
+ /// <param name="path"></param>
+ public void AddRoot(string path, Action<string> status)
+ {
+ IndexPath(path, status);
+ }
+
+ private void SyncToDisk()
+ {
+ lock (this)
+ {
+ _files.Values.ToList().ToJSON("vfs_cache.json");
+ }
+ }
+
+ private void IndexPath(string path, Action<string> status)
+ {
+ Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories)
+ .PMap(f => UpdateFile(f));
+ }
+
+ private void UpdateFile(string f)
+ {
+ TOP:
+ Console.WriteLine(f);
+ var lv = Lookup(f);
+ if (lv == null)
+ {
+ lv = new VirtualFile(this)
+ {
+ Paths = new string[] { f }
+ };
+ this[f] = lv;
+ lv.Analyze();
+ if (lv.IsArchive)
+ {
+ UpdateArchive(lv);
+ }
+ }
+ if (lv.IsOutdated)
+ {
+ Purge(lv);
+ goto TOP;
+ }
+ }
+
+ private void UpdateArchive(VirtualFile f)
+ {
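+ // Create a child VirtualFile for every entry in the archive; its Paths is the parent's path list plus the entry name, indexed under the pipe-joined full path.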
+ var entries = GetArchiveEntryNames(f);
+ var new_files = entries.Select(e => {
+ var new_path = new string[f.Paths.Length + 1];
+ f.Paths.CopyTo(new_path, 0);
+ new_path[f.Paths.Length] = e;
+ var nf = new VirtualFile(this)
+ {
+ Paths = new_path,
+ };
+ this[nf.FullPath] = nf;
+ return nf;
+ }).ToList();
+
+ // Stage the files in the archive
+ Stage(new_files);
+ // Analyze them
+ new_files.Do(file => file.Analyze());
+ // Recurse into any archives in this archive
+ new_files.Where(file => file.IsArchive).Do(file => UpdateArchive(file));
+ // Unstage the file
+ new_files.Where(file => file.IsStaged).Do(file => file.Unstage());
+
+ SyncToDisk();
+
+ }
+
+ private void Stage(IEnumerable<VirtualFile> files)
+ {
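+ // Group the files by their containing archive, staging shallower archives first so nested archives are already on disk before their children are extracted.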
+ var grouped = files.GroupBy(f => f.ParentArchive)
+ .OrderBy(f => f.Key == null ? 0 : f.Key.Paths.Length)
+ .ToList();
+
+ foreach (var group in grouped)
+ {
+ var indexed = group.ToDictionary(e => e.Paths[group.Key.Paths.Length]);
+ FileExtractor.Extract(group.Key.StagedPath, e =>
+ {
+ if (indexed.TryGetValue(e.Name, out var file))
+ {
+ return File.OpenWrite(file.GenerateStagedName());
+ }
+ return null;
+ });
+ }
+ }
+
+ internal VirtualFile Lookup(string path)
+ {
+ lock(this)
+ {
+ if (_files.TryGetValue(path, out VirtualFile value))
+ return value;
+ return null;
+ }
+ }
+
+ public VirtualFile this[string path]
+ {
+ get
+ {
+ return Lookup(path);
+ }
+ set
+ {
+ lock(this)
+ {
+ _files[path] = value;
+ }
+ }
+ }
+
+ internal List<string> GetArchiveEntryNames(VirtualFile file)
+ {
+ if (!file.IsStaged)
+ throw new InvalidDataException("File is not staged");
+
+ if (file.Extension == ".bsa") {
+ using (var ar = new BSAReader(file.StagedPath))
+ {
+ return ar.Files.Select(f => f.Path).ToList();
+ }
+ }
+ if (file.Extension == ".zip")
+ {
+ using (var s = new ZipFile(File.OpenRead(file.StagedPath)))
+ {
+ s.IsStreamOwner = true;
+ s.UseZip64 = UseZip64.On;
+
+ if (s.OfType<ZipEntry>().FirstOrDefault(e => !e.CanDecompress) == null)
+ {
+ return s.OfType<ZipEntry>()
+ .Where(f => f.IsFile)
+ .Select(f => f.Name.Replace('/', '\\'))
+ .ToList();
+ }
+ }
+ }
+
+ using (var e = new ArchiveFile(file.StagedPath))
+ {
+ return e.Entries
+ .Where(f => !f.IsFolder)
+ .Select(f => f.FileName).ToList();
+ }
+
+ }
+
+
+
+ /// <summary>
+ /// Remove all cached data for this file and if it is a top level archive, any sub-files.
+ /// </summary>
+ /// <param name="file"></param>
+ internal void Purge(VirtualFile file)
+ {
+ lock(this)
+ {
+ // Remove the file
+ _files.Remove(file.FullPath);
+
+ // If required, remove sub-files
+ if (file.IsArchive)
+ {
+ string prefix = file.FullPath + "|";
+ _files.Where(f => f.Key.StartsWith(prefix)).ToList().Do(f => _files.Remove(f.Key));
+ }
+ }
+ }
+ }
+
+ [JsonObject(MemberSerialization.OptIn)]
+ public class VirtualFile
+ {
+ [JsonProperty]
+ public string[] Paths;
+ [JsonProperty]
+ public string Hash;
+ [JsonProperty]
+ public long Size;
+ [JsonProperty]
+ public DateTime LastModifiedUTC;
+
+ private string _fullPath;
+ private VirtualFileSystem _vfs;
+
+ public VirtualFile(VirtualFileSystem vfs)
+ {
+ _vfs = vfs;
+ }
+
+ [JsonIgnore]
+ private string _stagedPath;
+
+ public string FullPath
+ {
+ get
+ {
+ if (_fullPath != null) return _fullPath;
+ _fullPath = String.Join("|", Paths);
+ return _fullPath;
+ }
+ }
+
+ public string Extension
+ {
+ get
+ {
+ return Path.GetExtension(Paths.Last());
+ }
+ }
+
+
+
+ /// <summary>
+ /// If this file is in an archive, return the Archive File, otherwise return null.
+ /// </summary>
+ public VirtualFile TopLevelArchive
+ {
+ get
+ {
+ if (Paths.Length == 0) return null;
+ return _vfs[Paths[0]];
+ }
+ }
+
+ public VirtualFile ParentArchive
+ {
+ get
+ {
+ if (Paths.Length == 0) return null;
+ return _vfs[String.Join("|", Paths.Take(Paths.Length - 1))];
+ }
+ }
+
+ private bool? _isArchive;
+ public bool IsArchive
+ {
+ get
+ {
+ if (_isArchive == null)
+ _isArchive = FileExtractor.CanExtract(Extension);
+ return (bool)_isArchive;
+ }
+ }
+
+ public bool IsStaged
+ {
+ get
+ {
+ if (IsConcrete) return true;
+ return _stagedPath != null;
+ }
+ }
+
+ public string StagedPath
+ {
+ get
+ {
+ if (!IsStaged)
+ throw new InvalidDataException("File is not staged");
+ if (IsConcrete) return Paths[0];
+ return _stagedPath;
+ }
+ }
+
+ public FileStream OpenRead()
+ {
+ if (!IsStaged)
+ throw new InvalidDataException("File is not staged, cannot open");
+ return File.OpenRead(_stagedPath);
+ }
+
+ /// <summary>
+ /// Calculate the file's SHA, size and last modified
+ /// </summary>
+ internal void Analyze()
+ {
+ if (!IsStaged)
+ throw new InvalidDataException("Cannot analzye a unstaged file");
+
+ var fio = new FileInfo(StagedPath);
+ Size = fio.Length;
+ Hash = Utils.FileSHA256(StagedPath);
+ LastModifiedUTC = fio.LastWriteTimeUtc;
+ }
+
+
+ /// <summary>
+ /// Delete the temporary file associated with this file
+ /// </summary>
+ internal void Unstage()
+ {
+ if (IsStaged && !IsConcrete)
+ {
+ File.Delete(_stagedPath);
+ _stagedPath = null;
+ }
+ }
+
+ internal string GenerateStagedName()
+ {
+ _stagedPath = Path.Combine(_vfs._stagedRoot, Guid.NewGuid().ToString() + Path.GetExtension(Paths.Last()));
+ return _stagedPath;
+ }
+
+ /// <summary>
+ /// Returns true if this file always exists on-disk, and doesn't need to be staged.
+ /// </summary>
+ public bool IsConcrete
+ {
+ get
+ {
+ return Paths.Length == 1;
+ }
+ }
+
+ public bool IsOutdated
+ {
+ get
+ {
+ if (IsStaged)
+ {
+ var fi = new FileInfo(StagedPath);
+ if (fi.LastWriteTimeUtc != LastModifiedUTC || fi.Length != Size)
+ return true;
+ }
+ return false;
+ }
+
+ }
+ }
+
+
+}
diff --git a/VirtualFileSystem/VirtualFileSystem.csproj b/VirtualFileSystem/VirtualFileSystem.csproj
new file mode 100644
index 00000000..b5072a59
--- /dev/null
+++ b/VirtualFileSystem/VirtualFileSystem.csproj
@@ -0,0 +1,76 @@
+
+
+
+
+ Debug
+ AnyCPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}
+ Library
+ Properties
+ VirtualFileSystem
+ VirtualFileSystem
+ v4.7.2
+ 512
+ true
+
+
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+ x64
+
+
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+
+
+
+ ..\packages\SharpZipLib.1.2.0\lib\net45\ICSharpCode.SharpZipLib.dll
+
+
+ ..\packages\LiteDB.4.1.4\lib\net40\LiteDB.dll
+
+
+ ..\packages\Newtonsoft.Json.12.0.2\lib\net45\Newtonsoft.Json.dll
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {ff5d892f-8ff4-44fc-8f7f-cd58f307ad1b}
+ Compression.BSA
+
+
+ {8aa97f58-5044-4bba-b8d9-a74b6947a660}
+ SevenZipExtractor
+
+
+ {b3f3fb6e-b9eb-4f49-9875-d78578bc7ae5}
+ Wabbajack.Common
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/VirtualFileSystem/packages.config b/VirtualFileSystem/packages.config
new file mode 100644
index 00000000..42269a6f
--- /dev/null
+++ b/VirtualFileSystem/packages.config
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<packages>
+ <package id="LiteDB" version="4.1.4" targetFramework="net472" />
+ <package id="Newtonsoft.Json" version="12.0.2" targetFramework="net472" />
+ <package id="SharpZipLib" version="1.2.0" targetFramework="net472" />
+</packages>
\ No newline at end of file
diff --git a/Wabbajack.Common/Consts.cs b/Wabbajack.Common/Consts.cs
index 47fcaf62..bf2fc02e 100644
--- a/Wabbajack.Common/Consts.cs
+++ b/Wabbajack.Common/Consts.cs
@@ -30,5 +30,6 @@ namespace Wabbajack.Common
public static string WABBAJACK_INCLUDE = "WABBAJACK_INCLUDE";
public static String AppName = "Wabbajack";
+ public static string HashCacheName = "Wabbajack.hash_cache";
}
}
diff --git a/Wabbajack.Common/Data.cs b/Wabbajack.Common/Data.cs
index 0548d128..6c16310a 100644
--- a/Wabbajack.Common/Data.cs
+++ b/Wabbajack.Common/Data.cs
@@ -33,6 +33,11 @@ namespace Wabbajack.Common
v.To = Path;
return v;
}
+
+ public void LoadHashFromCache(HashCache cache)
+ {
+ _hash = cache.HashFile(AbsolutePath);
+ }
}
public class ModList
@@ -174,6 +179,14 @@ namespace Wabbajack.Common
public List<string> Headers;
}
+ /// <summary>
+ /// A URL that cannot be downloaded automatically and has to be downloaded by hand
+ /// </summary>
+ public class ManualURLArchive : Archive
+ {
+ public string URL;
+ }
+
/// <summary>
/// An archive that requires additional HTTP headers.
/// </summary>
diff --git a/Wabbajack.Common/FileExtractor.cs b/Wabbajack.Common/FileExtractor.cs
index ea80cdd1..011e2d7f 100644
--- a/Wabbajack.Common/FileExtractor.cs
+++ b/Wabbajack.Common/FileExtractor.cs
@@ -108,11 +108,22 @@ namespace Wabbajack.Common
}
}
+ /// <summary>
+ /// Returns true if the given extension type can be extracted
+ /// </summary>
+ /// <param name="v"></param>
+ /// <returns></returns>
+ public static bool CanExtract(string v)
+ {
+ return Consts.SupportedArchives.Contains(v) || v == ".bsa";
+ }
public static void DeepExtract(string file, IEnumerable files, Func fnc, bool leave_open = false, int depth = 1)
{
// Files we need to extract at this level
- var files_for_level = files.Where(f => f.ArchiveHashPath.Length == depth).ToDictionary(e => e.From);
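+ // Several requested files can come from the same archive entry, so group the requests by their source path.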
+ var files_for_level = files.Where(f => f.ArchiveHashPath.Length == depth)
+ .GroupBy(e => e.From)
+ .ToDictionary(e => e.Key);
// Archives we need to extract at this level
var archives_for_level = files.Where(f => f.ArchiveHashPath.Length > depth)
.GroupBy(f => f.ArchiveHashPath[depth])
@@ -127,12 +138,21 @@ namespace Wabbajack.Common
if (files_for_level.TryGetValue(e.Name, out var fe))
{
- a = fnc(fe, e);
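+ // Chain the destination streams with SplittingStream so a single read of the archive entry feeds every requested output.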
+ foreach (var inner_fe in fe)
+ {
+ var str = fnc(inner_fe, e);
+ if (str == null) continue;
+ a = new SplittingStream(a, false, str, leave_open);
+ }
}
if (archives_for_level.TryGetValue(e.Name, out var archive))
{
var name = Path.GetTempFileName() + Path.GetExtension(e.Name);
+ if (disk_archives.ContainsKey(e.Name))
+ {
+ // Duplicate entry name: the Add below would throw, but this case is not handled yet.
+ }
disk_archives.Add(e.Name, name);
b = File.OpenWrite(name);
}
diff --git a/Wabbajack.Common/HashCache.cs b/Wabbajack.Common/HashCache.cs
new file mode 100644
index 00000000..62a21562
--- /dev/null
+++ b/Wabbajack.Common/HashCache.cs
@@ -0,0 +1,127 @@
+using Compression.BSA;
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Wabbajack.Common
+{
+
+ public class HashCache : IDisposable
+ {
+ public class Entry
+ {
+ public string name;
+ public string hash;
+ public long size;
+ public DateTime last_modified;
+ }
+
+ public class BSA
+ {
+ public string full_path;
+ public string hash;
+ public long size;
+ public DateTime last_modified;
+ public Dictionary<string, string> entries;
+ }
+
+ private ConcurrentDictionary<string, Entry> _hashes = new ConcurrentDictionary<string, Entry>();
+ private ConcurrentDictionary<string, BSA> _bsas = new ConcurrentDictionary<string, BSA>();
+ private bool disposed;
+
+ public class DB
+ {
+ public List<Entry> entries;
+ public List<BSA> bsas;
+ }
+
+ public HashCache()
+ {
+ if (Consts.HashCacheName.FileExists())
+ {
+ var json = Consts.HashCacheName.FromJSON<DB>();
+ _hashes = new ConcurrentDictionary<string, Entry>(json.entries.Select(e => new KeyValuePair<string, Entry>(e.name, e)));
+ _bsas = new ConcurrentDictionary<string, BSA>(json.bsas.Select(e => new KeyValuePair<string, BSA>(e.full_path, e)));
+ }
+ }
+
+ public string HashFile(string filename)
+ {
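+ // Return the cached SHA-256 for this file, recomputing (and re-caching) it if the size or timestamp on disk no longer matches.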
+ TOP:
+ var result = _hashes.GetOrAdd(filename,
+ s =>
+ {
+ var fi = new FileInfo(filename);
+ return new Entry
+ {
+ name = filename,
+ hash = Utils.FileSHA256(filename),
+ size = fi.Length,
+ last_modified = fi.LastWriteTimeUtc
+ };
+ });
+
+ var info = new FileInfo(filename);
+ if (info.LastWriteTimeUtc != result.last_modified || info.Length != result.size)
+ {
+ _hashes.TryRemove(filename, out Entry v);
+ goto TOP;
+ }
+ return result.hash;
+ }
+
+ public void Dispose()
+ {
+ if (disposed) return;
+ new DB
+ {
+ entries = _hashes.Values.ToList(),
+ bsas = _bsas.Values.ToList()
+ }.ToJSON(Consts.HashCacheName);
+ disposed = true;
+ _hashes = null;
+ _bsas = null;
+ }
+
+ public List<(string, string)> HashBSA(string absolutePath, Action<string> status)
+ {
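+ // Serve the per-entry hashes from the cache while the BSA's size and timestamp are unchanged; otherwise hash every entry in parallel, cache the result, and loop back to return it.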
+ TOP:
+ var finfo = new FileInfo(absolutePath);
+ if (_bsas.TryGetValue(absolutePath, out BSA ar))
+ {
+ if (ar.last_modified == finfo.LastWriteTimeUtc && ar.size == finfo.Length)
+ return ar.entries.Select(kv => (kv.Key, kv.Value)).ToList();
+
+ _bsas.TryRemove(absolutePath, out BSA value);
+ }
+
+ var bsa = new BSA()
+ {
+ full_path = absolutePath,
+ size = finfo.Length,
+ last_modified = finfo.LastWriteTimeUtc,
+ };
+
+ var entries = new ConcurrentBag<(string, string)>();
+ status($"Hashing BSA: {absolutePath}");
+
+ using (var a = new BSAReader(absolutePath))
+ {
+ a.Files.PMap(entry =>
+ {
+ status($"Hashing BSA: {absolutePath} - {entry.Path}");
+ var data = entry.GetData();
+ entries.Add((entry.Path, data.SHA256()));
+ });
+ }
+ bsa.entries = entries.ToDictionary(e => e.Item1, e => e.Item2);
+ _bsas.TryAdd(absolutePath, bsa);
+
+ goto TOP;
+ }
+ }
+}
diff --git a/Wabbajack.Common/Utils.cs b/Wabbajack.Common/Utils.cs
index 2e9658a4..2db6ae2c 100644
--- a/Wabbajack.Common/Utils.cs
+++ b/Wabbajack.Common/Utils.cs
@@ -269,6 +269,24 @@ namespace Wabbajack.Common
File.WriteAllText($"{DateTime.Now.ToString("yyyyMMddTHHmmss_crash_log.txt")}", ExceptionToString(e));
}
+ public static IEnumerable<T> DistinctBy<T, K>(this IEnumerable<T> vs, Func<T, K> select)
+ {
+ HashSet<K> set = new HashSet<K>();
+ foreach (var v in vs) {
+ var key = select(v);
+ if (!set.Add(key)) continue;
+ yield return v;
+ }
+
+ }
+
+ public static T Last<T>(this T[] a)
+ {
+ if (a == null || a.Length == 0)
+ throw new InvalidDataException("null or empty array");
+ return a[a.Length - 1];
+ }
+
public static V GetOrDefault<K, V>(this IDictionary<K, V> dict, K key)
{
if (dict.TryGetValue(key, out V v)) return v;
diff --git a/Wabbajack.Common/Wabbajack.Common.csproj b/Wabbajack.Common/Wabbajack.Common.csproj
index 5083f1d0..666ace66 100644
--- a/Wabbajack.Common/Wabbajack.Common.csproj
+++ b/Wabbajack.Common/Wabbajack.Common.csproj
@@ -78,6 +78,7 @@
+ <Compile Include="HashCache.cs" />
diff --git a/Wabbajack.sln b/Wabbajack.sln
index 68464d5c..c79e480f 100644
--- a/Wabbajack.sln
+++ b/Wabbajack.sln
@@ -20,6 +20,10 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution
RECIPES.md = RECIPES.md
EndProjectSection
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VirtualFileSystem", "VirtualFileSystem\VirtualFileSystem.csproj", "{5128B489-BC28-4F66-9F0B-B4565AF36CBC}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VirtualFileSystem.Test", "VirtualFileSystem.Test\VirtualFileSystem.Test.csproj", "{A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug (no commandargs)|Any CPU = Debug (no commandargs)|Any CPU
@@ -90,6 +94,30 @@ Global
{BA2CFEA1-072B-42D6-822A-8C6D0E3AE5D9}.Release|Any CPU.Build.0 = Release|Any CPU
{BA2CFEA1-072B-42D6-822A-8C6D0E3AE5D9}.Release|x64.ActiveCfg = Release|x64
{BA2CFEA1-072B-42D6-822A-8C6D0E3AE5D9}.Release|x64.Build.0 = Release|x64
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug (no commandargs)|Any CPU.ActiveCfg = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug (no commandargs)|Any CPU.Build.0 = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug (no commandargs)|x64.ActiveCfg = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug (no commandargs)|x64.Build.0 = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug|x64.Build.0 = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Release|Any CPU.Build.0 = Release|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Release|x64.ActiveCfg = Release|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Release|x64.Build.0 = Release|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug (no commandargs)|Any CPU.ActiveCfg = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug (no commandargs)|Any CPU.Build.0 = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug (no commandargs)|x64.ActiveCfg = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug (no commandargs)|x64.Build.0 = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug|x64.Build.0 = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Release|Any CPU.Build.0 = Release|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Release|x64.ActiveCfg = Release|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Release|x64.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
diff --git a/Wabbajack/Compiler.cs b/Wabbajack/Compiler.cs
index 13ac5736..8b9b1046 100644
--- a/Wabbajack/Compiler.cs
+++ b/Wabbajack/Compiler.cs
@@ -63,6 +63,15 @@ namespace Wabbajack
public List<IndexedArchiveEntry> IndexedFiles { get; private set; }
+ public class IndexedFileMatch
+ {
+ public IndexedArchive Archive;
+ public IndexedArchiveEntry Entry;
+ public DateTime LastModified;
+ }
+
+ public Dictionary<string, IEnumerable<IndexedFileMatch>> DirectMatchIndex;
+
public void Info(string msg, params object[] args)
{
if (args.Length > 0)
@@ -249,6 +258,23 @@ namespace Wabbajack
Info("Searching for mod files");
AllFiles = mo2_files.Concat(game_files).ToList();
+ Info("Hashing files");
+
+ HashCache cache;
+
+ using (cache = new HashCache())
+ {
+ AllFiles.PMap(f => {
+ Status($"Hashing {f.Path}");
+ try
+ {
+ f.LoadHashFromCache(cache);
+ }
+ catch (IOException) { } // files we cannot open are simply left unhashed here
+ return f;
+ });
+ }
+
Info("Found {0} files to build into mod list", AllFiles.Count);
ExtraFiles = new ConcurrentBag<Directive>();
@@ -265,11 +291,15 @@ namespace Wabbajack
.Where(f => f.Item2 != null)
.ToDictionary(f => f.Item1, f => f.Item2);
- var stack = MakeStack();
+ cache = new HashCache();
+
+ var stack = MakeStack(cache);
Info("Running Compilation Stack");
var results = AllFiles.PMap(f => RunStack(stack, f)).ToList();
+ cache.Dispose();
+
// Add the extra files that were generated by the stack
Info($"Adding {ExtraFiles.Count} that were generated by the stack");
results = results.Concat(ExtraFiles).ToList();
@@ -356,14 +386,16 @@ namespace Wabbajack
private void BuildArchivePatches(string archive_sha, IEnumerable group, Dictionary absolute_paths)
{
var archive = IndexedArchives.First(a => a.Hash == archive_sha);
- var paths = group.Select(g => g.FullPath).ToHashSet();
+ var paths = group.Select(g => g.FullPath)
+ .ToHashSet();
var streams = new Dictionary<string, MemoryStream>();
Status($"Extracting {paths.Count} patch files from {archive.Name}");
// First we fetch the source files from the input archive
- FileExtractor.DeepExtract(archive.AbsolutePath, group, (fe, entry) =>
+ FileExtractor.DeepExtract(archive.AbsolutePath, group.DistinctBy(f => f.FullPath), (fe, entry) =>
{
if (!paths.Contains(fe.FullPath)) return null;
+ if (streams.ContainsKey(fe.FullPath)) return null;
var result = new MemoryStream();
streams.Add(fe.FullPath, result);
@@ -520,6 +552,13 @@ namespace Wabbajack
}
result = tmp;
}
+ else if (general.manualURL != null)
+ {
+ result = new ManualURLArchive()
+ {
+ URL = general.manualURL.ToString()
+ };
+ }
else
{
Error("No way to handle archive {0} but it's required by the modpack", found.Name);
@@ -540,10 +579,12 @@ namespace Wabbajack
private Directive RunStack(IEnumerable<Func<RawSourceFile, Directive>> stack, RawSourceFile source)
{
Status("Compiling {0}", source.Path);
- return (from f in stack
- let result = f(source)
- where result != null
- select result).First();
+ foreach (var f in stack)
+ {
+ var result = f(source);
+ if (result != null) return result;
+ }
+ throw new InvalidDataException("Data fell out of the compilation stack");
}
@@ -553,7 +594,7 @@ namespace Wabbajack
/// result included into the pack
/// </summary>
/// <returns></returns>
- private IEnumerable<Func<RawSourceFile, Directive>> MakeStack()
+ private IEnumerable<Func<RawSourceFile, Directive>> MakeStack(HashCache cache)
{
Info("Generating compilation stack");
return new List<Func<RawSourceFile, Directive>>()
@@ -562,6 +603,7 @@ namespace Wabbajack
IgnoreStartsWith("downloads\\"),
IgnoreStartsWith("webcache\\"),
IgnoreStartsWith("overwrite\\"),
+ IgnorePathContains("temporary_logs"),
IgnoreEndsWith(".pyc"),
IgnoreEndsWith(".log"),
IgnoreOtherProfiles(),
@@ -575,10 +617,10 @@ namespace Wabbajack
IgnoreRegex(Consts.GameFolderFilesDir + "\\\\.*\\.bsa"),
IncludeModIniData(),
DirectMatch(),
- IncludeTaggedFiles(),
- DeconstructBSAs(), // Deconstruct BSAs before building patches so we don't generate massive patch files
+ DeconstructBSAs(cache), // Deconstruct BSAs before building patches so we don't generate massive patch files
IncludePatches(),
IncludeDummyESPs(),
+ IncludeTaggedFiles(),
// If we have no match at this point for a game folder file, skip them, we can't do anything about them
@@ -597,6 +639,22 @@ namespace Wabbajack
};
}
+ private Func<RawSourceFile, Directive> IgnorePathContains(string v)
+ {
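+ // Normalize the fragment to "\fragment\" so only whole directory components match.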
+ v = $"\\{v.Trim('\\')}\\";
+ var reason = $"Ignored because path contains {v}";
+ return source =>
+ {
+ if (source.Path.Contains(v))
+ {
+ var result = source.EvolveTo<IgnoredDirectly>();
+ result.Reason = reason;
+ return result;
+ }
+ return null;
+ };
+ }
+
///
/// If a user includes WABBAJACK_INCLUDE directly in the notes or comments of a mod, the contents of that
@@ -670,8 +728,17 @@ namespace Wabbajack
/// all of the files.
/// </summary>
/// <returns></returns>
- private Func<RawSourceFile, Directive> DeconstructBSAs()
+ private Func<RawSourceFile, Directive> DeconstructBSAs(HashCache cache)
{
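+ // Mods whose meta.ini notes or comments contain WABBAJACK_INCLUDE get their BSA contents inlined into the modlist rather than dropped.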
+ var include_directly = ModInis.Where(kv => {
+ var general = kv.Value.General;
+ if (general.notes != null && general.notes.Contains(Consts.WABBAJACK_INCLUDE))
+ return true;
+ if (general.comments != null && general.comments.Contains(Consts.WABBAJACK_INCLUDE))
+ return true;
+ return false;
+ }).Select(kv => $"mods\\{kv.Key}\\");
+
var microstack = new List<Func<RawSourceFile, Directive>>()
{
DirectMatch(),
@@ -679,11 +746,32 @@ namespace Wabbajack
DropAll()
};
+ var microstack_with_include = new List<Func<RawSourceFile, Directive>>()
+ {
+ DirectMatch(),
+ IncludePatches(),
+ IncludeALL()
+ };
+
+
return source =>
{
if (!Consts.SupportedBSAs.Contains(Path.GetExtension(source.Path))) return null;
- var hashed = HashBSA(source.AbsolutePath);
+ bool default_include = false;
+ if (source.Path.StartsWith("mods"))
+ {
+ foreach (var modpath in include_directly)
+ {
+ if (source.Path.StartsWith(modpath))
+ {
+ default_include = true;
+ break;
+ }
+ }
+ }
+
+ var hashed = cache.HashBSA(source.AbsolutePath, s => Status(s));
var source_files = hashed.Select(e => new RawSourceFile() {
Hash = e.Item2,
@@ -691,8 +779,9 @@ namespace Wabbajack
AbsolutePath = e.Item1
});
+ var stack = default_include ? microstack_with_include : microstack;
- var matches = source_files.Select(e => RunStack(microstack, e));
+ var matches = source_files.PMap(e => RunStack(stack, e));
var id = Guid.NewGuid().ToString();
@@ -724,12 +813,22 @@ namespace Wabbajack
};
}
+ private Func<RawSourceFile, Directive> IncludeALL()
+ {
+ return source =>
+ {
+ var inline = source.EvolveTo<InlineFile>();
+ inline.SourceData = File.ReadAllBytes(source.AbsolutePath).ToBase64();
+ return inline;
+ };
+ }
+
/// <summary>
/// Given a BSA on disk, index it and return a dictionary of SHA256 -> filename
/// </summary>
/// <param name="absolutePath"></param>
/// <returns></returns>
- private List<(string, string)> HashBSA(string absolutePath)
+ private List<(string, string)> HashBSA(HashCache cache, string absolutePath)
{
Status($"Hashing BSA: {absolutePath}");
var results = new List<(string, string)>();
@@ -909,31 +1008,37 @@ namespace Wabbajack
{
var archive_shas = IndexedArchives.GroupBy(e => e.Hash)
.ToDictionary(e => e.Key);
+ if (DirectMatchIndex == null)
+ {
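+ // Build the direct-match index once: newest archives sort first, so lookups prefer the most recently modified archive that contains a given hash.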
- var indexed = (from entry in IndexedFiles
- select new { archive = archive_shas[entry.HashPath[0]].First(),
- entry = entry })
- .GroupBy(e => e.entry.Hash)
- .ToDictionary(e => e.Key);
-
-
+ DirectMatchIndex = IndexedFiles.PMap(entry => {
+ var archive = archive_shas[entry.HashPath[0]].First();
+ return new IndexedFileMatch
+ {
+ Archive = archive,
+ Entry = entry,
+ LastModified = new FileInfo(archive.AbsolutePath).LastWriteTimeUtc
+ };
+ })
+ .OrderByDescending(e => e.LastModified)
+ .GroupBy(e => e.Entry.Hash)
+ .ToDictionary(e => e.Key, e => e.AsEnumerable());
+ }
return source =>
{
- if (indexed.TryGetValue(source.Hash, out var found))
+ if (DirectMatchIndex.TryGetValue(source.Hash, out var found))
{
var result = source.EvolveTo<FromArchive>();
- var match = found.Where(f => Path.GetFileName(f.entry.Path) == Path.GetFileName(source.Path))
- .OrderByDescending(f => new FileInfo(f.archive.AbsolutePath).LastWriteTime)
+ var match = found.Where(f => Path.GetFileName(f.Entry.Path) == Path.GetFileName(source.Path))
.FirstOrDefault();
if (match == null)
- match = found.OrderByDescending(f => new FileInfo(f.archive.AbsolutePath).LastWriteTime)
- .FirstOrDefault();
+ match = found.FirstOrDefault();
- result.ArchiveHashPath = match.entry.HashPath;
- result.From = match.entry.Path;
+ result.ArchiveHashPath = match.Entry.HashPath;
+ result.From = match.Entry.Path;
return result;
}
return null;