diff --git a/Compression.BSA/BSABuilder.cs b/Compression.BSA/BSABuilder.cs
index 09f3f3e4..d8460b65 100644
--- a/Compression.BSA/BSABuilder.cs
+++ b/Compression.BSA/BSABuilder.cs
@@ -1,10 +1,10 @@
-using lz4;
+using K4os.Compression.LZ4;
+using K4os.Compression.LZ4.Streams;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
-using System.Threading.Tasks;
namespace Compression.BSA
{
@@ -327,13 +327,8 @@ namespace Compression.BSA
if (_bsa.HeaderType == VersionType.SSE)
{
var r = new MemoryStream();
-
- using (var f = LZ4Stream.CreateCompressor(r, LZ4StreamMode.Write, LZ4FrameBlockMode.Independent, LZ4FrameBlockSize.Max4MB, LZ4FrameChecksumMode.Content,
- highCompression: true, leaveInnerStreamOpen: true))
- {
-
- new MemoryStream(_rawData).CopyTo(f);
- }
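+ // Switch to the K4os LZ4 frame encoder; LZ4Level.L10_OPT is a high-compression setting standing in for the old highCompression flag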
+ using (var w = LZ4Stream.Encode(r, new LZ4EncoderSettings() { CompressionLevel = LZ4Level.L10_OPT}))
+ (new MemoryStream(_rawData)).CopyTo(w);
_rawData = r.ToArray();
diff --git a/Compression.BSA/BSAReader.cs b/Compression.BSA/BSAReader.cs
index cf7b3097..865b72be 100644
--- a/Compression.BSA/BSAReader.cs
+++ b/Compression.BSA/BSAReader.cs
@@ -3,7 +3,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
-using lz4;
+using K4os.Compression.LZ4.Streams;
namespace Compression.BSA
{
@@ -362,10 +362,8 @@ namespace Compression.BSA
file_size -= 4;
if (_bsa.HeaderType == VersionType.SSE)
{
- using (var dc = LZ4Stream.CreateDecompressor(output, LZ4StreamMode.Write, true))
- {
- rdr.BaseStream.CopyToLimit(dc, file_size);
- }
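+ // The K4os decoder consumes a complete, self-delimiting LZ4 frame from the base stream, so the old CopyToLimit length cap is no longer needed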
+ var r = LZ4Stream.Decode(rdr.BaseStream);
+ r.CopyTo(output);
}
else
{
diff --git a/Compression.BSA/Compression.BSA.csproj b/Compression.BSA/Compression.BSA.csproj
index 7a7ecddb..69c3e49d 100644
--- a/Compression.BSA/Compression.BSA.csproj
+++ b/Compression.BSA/Compression.BSA.csproj
@@ -50,12 +50,30 @@
MinimumRecommendedRules.ruleset
- <Reference Include="lz4.AnyCPU.loader">
- <HintPath>..\packages\IonKiwi.lz4.net.1.0.12\lib\net472\lz4.AnyCPU.loader.dll</HintPath>
- </Reference>
+ <Reference Include="K4os.Compression.LZ4">
+ <HintPath>..\packages\K4os.Compression.LZ4.1.1.11\lib\net46\K4os.Compression.LZ4.dll</HintPath>
+ </Reference>
+ <Reference Include="K4os.Compression.LZ4.Streams">
+ <HintPath>..\packages\K4os.Compression.LZ4.Streams.1.1.11\lib\net46\K4os.Compression.LZ4.Streams.dll</HintPath>
+ </Reference>
+ <Reference Include="K4os.Hash.xxHash">
+ <HintPath>..\packages\K4os.Hash.xxHash.1.0.6\lib\net46\K4os.Hash.xxHash.dll</HintPath>
+ </Reference>
+ <Reference Include="System.Buffers">
+ <HintPath>..\packages\System.Buffers.4.4.0\lib\netstandard2.0\System.Buffers.dll</HintPath>
+ </Reference>
+ <Reference Include="System.Memory">
+ <HintPath>..\packages\System.Memory.4.5.3\lib\netstandard2.0\System.Memory.dll</HintPath>
+ </Reference>
+ <Reference Include="System.Numerics.Vectors">
+ <HintPath>..\packages\System.Numerics.Vectors.4.4.0\lib\net46\System.Numerics.Vectors.dll</HintPath>
+ </Reference>
+ <Reference Include="System.Runtime.CompilerServices.Unsafe">
+ <HintPath>..\packages\System.Runtime.CompilerServices.Unsafe.4.5.2\lib\netstandard2.0\System.Runtime.CompilerServices.Unsafe.dll</HintPath>
+ </Reference>
diff --git a/Compression.BSA/Utils.cs b/Compression.BSA/Utils.cs
index 6272c174..31117c22 100644
--- a/Compression.BSA/Utils.cs
+++ b/Compression.BSA/Utils.cs
@@ -1,5 +1,4 @@
-using lz4.AnyCPU.loader;
-using System;
+using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
@@ -10,11 +9,6 @@ namespace Compression.BSA
{
private static Encoding Windows1251 = Encoding.GetEncoding(1251);
- static Utils ()
- {
- LZ4Loader.DisableVCRuntimeDetection = true;
- }
-
public static string ReadStringLen(this BinaryReader rdr)
{
var len = rdr.ReadByte();
diff --git a/Compression.BSA/packages.config b/Compression.BSA/packages.config
index 2b4a3d68..0fb520a3 100644
--- a/Compression.BSA/packages.config
+++ b/Compression.BSA/packages.config
@@ -1,4 +1,10 @@
- <package id="IonKiwi.lz4.net" version="1.0.12" targetFramework="net472" />
+ <package id="K4os.Compression.LZ4" version="1.1.11" targetFramework="net472" />
+ <package id="K4os.Compression.LZ4.Streams" version="1.1.11" targetFramework="net472" />
+ <package id="K4os.Hash.xxHash" version="1.0.6" targetFramework="net472" />
+ <package id="System.Buffers" version="4.4.0" targetFramework="net472" />
+ <package id="System.Memory" version="4.5.3" targetFramework="net472" />
+ <package id="System.Numerics.Vectors" version="4.4.0" targetFramework="net472" />
+ <package id="System.Runtime.CompilerServices.Unsafe" version="4.5.2" targetFramework="net472" />
\ No newline at end of file
diff --git a/VirtualFileSystem.Test/App.config b/VirtualFileSystem.Test/App.config
new file mode 100644
index 00000000..56efbc7b
--- /dev/null
+++ b/VirtualFileSystem.Test/App.config
@@ -0,0 +1,6 @@
+ <?xml version="1.0" encoding="utf-8"?>
+ <configuration>
+ <startup>
+ <supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.7.2" />
+ </startup>
+ </configuration>
\ No newline at end of file
diff --git a/VirtualFileSystem.Test/Program.cs b/VirtualFileSystem.Test/Program.cs
new file mode 100644
index 00000000..c85b3c9a
--- /dev/null
+++ b/VirtualFileSystem.Test/Program.cs
@@ -0,0 +1,17 @@
+using System;
+using Wabbajack.Common;
+
+namespace VirtualFileSystem.Test
+{
+ class Program
+ {
+ static void Main(string[] args)
+ {
+ Utils.SetLoggerFn(s => Console.WriteLine(s));
+ Utils.SetStatusFn((s, i) => Console.WriteLine(s));
+ WorkQueue.Init((a, b, c) => { return; },
+ (a, b) => { return; });
+ VFS.VirtualFileSystem.VFS.AddRoot(@"D:\MO2 Instances\Mod Organizer 2");
+ }
+ }
+}
diff --git a/VirtualFileSystem.Test/Properties/AssemblyInfo.cs b/VirtualFileSystem.Test/Properties/AssemblyInfo.cs
new file mode 100644
index 00000000..7746488b
--- /dev/null
+++ b/VirtualFileSystem.Test/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("VirtualFileSystem.Test")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("VirtualFileSystem.Test")]
+[assembly: AssemblyCopyright("Copyright © 2019")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("a2913dfe-18ff-468b-a6c1-55f7c0cc0ce8")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/VirtualFileSystem.Test/VirtualFileSystem.Test.csproj b/VirtualFileSystem.Test/VirtualFileSystem.Test.csproj
new file mode 100644
index 00000000..218b1aae
--- /dev/null
+++ b/VirtualFileSystem.Test/VirtualFileSystem.Test.csproj
@@ -0,0 +1,63 @@
+
+
+
+
+ Debug
+ AnyCPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}
+ Exe
+ VirtualFileSystem.Test
+ VirtualFileSystem.Test
+ v4.7.2
+ 512
+ true
+ true
+
+
+ x64
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+
+
+ AnyCPU
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {5128b489-bc28-4f66-9f0b-b4565af36cbc}
+ VirtualFileSystem
+
+
+ {B3F3FB6E-B9EB-4F49-9875-D78578BC7AE5}
+ Wabbajack.Common
+
+
+
+
\ No newline at end of file
diff --git a/VirtualFileSystem/Properties/AssemblyInfo.cs b/VirtualFileSystem/Properties/AssemblyInfo.cs
new file mode 100644
index 00000000..3693b24e
--- /dev/null
+++ b/VirtualFileSystem/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("VirtualFileSystem")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("VirtualFileSystem")]
+[assembly: AssemblyCopyright("Copyright © 2019")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("5128b489-bc28-4f66-9f0b-b4565af36cbc")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/VirtualFileSystem/VirtualFileSystem.cs b/VirtualFileSystem/VirtualFileSystem.cs
new file mode 100644
index 00000000..477bdadf
--- /dev/null
+++ b/VirtualFileSystem/VirtualFileSystem.cs
@@ -0,0 +1,585 @@
+using Compression.BSA;
+using ICSharpCode.SharpZipLib.Zip;
+using Newtonsoft.Json;
+using SevenZipExtractor;
+using System;
+using System.Collections.Generic;
+using System.Collections.Immutable;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using Wabbajack.Common;
+
+namespace VFS
+{
+ public class VirtualFileSystem
+ {
+
+ internal static string _stagedRoot;
+ public static VirtualFileSystem VFS;
+ private Dictionary<string, VirtualFile> _files = new Dictionary<string, VirtualFile>();
+ private bool _disableDiskCache;
+
+ public static string RootFolder { get; }
+ public Dictionary<string, IEnumerable<VirtualFile>> HashIndex { get; private set; }
+
+ static VirtualFileSystem()
+ {
+ VFS = new VirtualFileSystem();
+ RootFolder = ".\\";
+ _stagedRoot = Path.Combine(RootFolder, "vfs_staged_files");
+ if (Directory.Exists(_stagedRoot))
+ Directory.Delete(_stagedRoot, true);
+
+ Directory.CreateDirectory(_stagedRoot);
+ }
+
+ public VirtualFileSystem ()
+ {
+ LoadFromDisk();
+ }
+
+ private void LoadFromDisk()
+ {
+ try
+ {
+ Utils.Log("Loading VFS Cache");
+ if (!File.Exists("vfs_cache.bson")) return;
+ _files = "vfs_cache.bson".FromBSON<IEnumerable<VirtualFile>>(root_is_array: true).ToDictionary(f => f.FullPath);
+ CleanDB();
+ }
+ catch(Exception ex)
+ {
+ Utils.Log($"Purging cache due to {ex}");
+ File.Delete("vfs_cache.bson");
+ _files.Clear();
+ }
+ }
+
+ public void SyncToDisk()
+ {
+ if (!_disableDiskCache)
+ lock(this)
+ {
+ _files.Values.OfType<VirtualFile>().ToBSON("vfs_cache.bson");
+ }
+ }
+
+ public IList<VirtualFile> FilesInArchive(VirtualFile f)
+ {
+ var path = f.FullPath + "|";
+ return _files.Values
+ .Where(v => v.FullPath.StartsWith(path))
+ .ToList();
+ }
+
+
+ public void Purge(VirtualFile f)
+ {
+ var path = f.FullPath + "|";
+ lock (this)
+ {
+ _files.Values
+ .Where(v => v.FullPath.StartsWith(path) || v.FullPath == f.FullPath)
+ .ToList()
+ .Do(r => {
+ _files.Remove(r.FullPath);
+ });
+ }
+ }
+
+ public void Add(VirtualFile f)
+ {
+ lock (this)
+ {
+ if (_files.ContainsKey(f.FullPath))
+ Purge(f);
+ _files.Add(f.FullPath, f);
+ }
+ }
+
+ public VirtualFile Lookup(string f)
+ {
+ lock (this)
+ {
+ if (_files.TryGetValue(f, out var found))
+ return found;
+ return null;
+ }
+ }
+
+ /// <summary>
+ /// Remove any orphaned files in the DB.
+ /// </summary>
+ private void CleanDB()
+ {
+ Utils.Log("Cleaning VFS cache");
+ lock (this)
+ {
+ _files.Values
+ .Where(f =>
+ {
+ if (f.IsConcrete)
+ return !File.Exists(f.StagedPath);
+ while (f.ParentPath != null)
+ {
+ if (Lookup(f.ParentPath) == null)
+ return true;
+ f = Lookup(f.ParentPath);
+ }
+ return false;
+ })
+ .ToList()
+ .Do(f => _files.Remove(f.FullPath));
+ }
+ }
+
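+ // Create placeholder entries for any parent archive paths that indexed files reference but that are missing from the index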
+ public void BackfillMissing()
+ {
+ lock(this)
+ {
+ _files.Values
+ .Select(f => f.ParentPath)
+ .Where(s => s != null)
+ .Where(s => !_files.ContainsKey(s))
+ .ToHashSet()
+ .Do(s =>
+ {
+ AddKnown(new VirtualFile() { Paths = s.Split('|') });
+ });
+ }
+ }
+
+ /// <summary>
+ /// Add a known file to the index. This is a bit of a hack, as we don't assume that all the fields for the archive are filled in;
+ /// you will need to manually update the SHA hash index when you are done adding files, by calling `RefreshIndexes`.
+ /// </summary>
+ /// <param name="virtualFile"></param>
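+ /// <remarks>Illustrative usage (archivePath and knownHash are placeholders): VFS.AddKnown(new VirtualFile() { Paths = new[] { archivePath }, Hash = knownHash }); then call VFS.RefreshIndexes() once all known files are added.</remarks>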
+ public void AddKnown(VirtualFile virtualFile)
+ {
+ lock(this)
+ {
+ // We don't know enough about these files to be able to store them in the disk cache
+ _disableDiskCache = true;
+ _files[virtualFile.FullPath] = virtualFile;
+ }
+ }
+
+ /// <summary>
+ /// Adds the root path to the filesystem. This may take quite some time as every file in the folder will be hashed,
+ /// and every archive examined.
+ /// </summary>
+ /// <param name="path"></param>
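+ /// <remarks>e.g. VirtualFileSystem.VFS.AddRoot(@"D:\MO2 Instances\Mod Organizer 2") as in VirtualFileSystem.Test; indexing hashes every file and recurses into every archive it can extract.</remarks>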
+ public void AddRoot(string path)
+ {
+ IndexPath(path);
+ RefreshIndexes();
+ }
+
+ public void RefreshIndexes()
+ {
+ Utils.Log("Building Hash Index");
+ lock(this)
+ {
+ HashIndex = _files.Values
+ .GroupBy(f => f.Hash)
+ .ToDictionary(f => f.Key, f => (IEnumerable<VirtualFile>)f);
+ }
+ }
+
+ private void IndexPath(string path)
+ {
+ var file_list = Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories).ToList();
+ Utils.Log($"Updating the cache for {file_list.Count} files");
+ file_list.PMap(f => UpdateFile(f));
+ SyncToDisk();
+ }
+
+ private void UpdateFile(string f)
+ {
+ TOP:
+ var lv = Lookup(f);
+ if (lv == null)
+ {
+ Utils.Status($"Analyzing {f}");
+
+ lv = new VirtualFile()
+ {
+ Paths = new string[] { f }
+ };
+
+ lv.Analyze();
+ Add(lv);
+ if (lv.IsArchive)
+ {
+ UpdateArchive(lv);
+ }
+ // Upsert after extraction in case extraction fails
+ }
+ if (lv.IsOutdated)
+ {
+ Purge(lv);
+ goto TOP;
+ }
+ }
+
+ private void UpdateArchive(VirtualFile f)
+ {
+ var entries = GetArchiveEntryNames(f);
+ var new_files = entries.Select(e => {
+ var new_path = new string[f.Paths.Length + 1];
+ f.Paths.CopyTo(new_path, 0);
+ new_path[f.Paths.Length] = e;
+ var nf = new VirtualFile()
+ {
+ Paths = new_path,
+ };
+ Add(nf);
+ return nf;
+ }).ToList();
+
+ // Stage the files in the archive
+ Stage(new_files);
+ // Analyze them
+ new_files.Do(file => file.Analyze());
+ // Recurse into any archives in this archive
+ new_files.Where(file => file.IsArchive).Do(file => UpdateArchive(file));
+ // Unstage the file
+ new_files.Where(file => file.IsStaged).Do(file => file.Unstage());
+
+ f.FinishedIndexing = true;
+ SyncToDisk();
+ }
+
+ public void Stage(IEnumerable<VirtualFile> files)
+ {
+ var grouped = files.SelectMany(f => f.FilesInPath)
+ .Distinct()
+ .Where(f => f.ParentArchive != null)
+ .GroupBy(f => f.ParentArchive)
+ .OrderBy(f => f.Key == null ? 0 : f.Key.Paths.Length)
+ .ToList();
+
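+ // Group the needed files by their parent archive (outermost archives first) and extract only those entries into their staged paths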
+ foreach (var group in grouped)
+ {
+ var indexed = group.ToDictionary(e => e.Paths[group.Key.Paths.Length]);
+ FileExtractor.Extract(group.Key.StagedPath, e =>
+ {
+ if (indexed.TryGetValue(e.Name, out var file))
+ {
+ return File.OpenWrite(file.GenerateStagedName());
+ }
+ return null;
+ });
+ }
+ }
+
+
+
+ public StagingGroup StageWith(IEnumerable<VirtualFile> files)
+ {
+ var grp = new StagingGroup(files);
+ grp.Stage();
+ return grp;
+ }
+
+ public VirtualFile this[string path]
+ {
+ get
+ {
+ return Lookup(path);
+ }
+ }
+
+ internal List<string> GetArchiveEntryNames(VirtualFile file)
+ {
+ if (!file.IsStaged)
+ throw new InvalidDataException("File is not staged");
+
+ if (file.Extension == ".bsa") {
+ using (var ar = new BSAReader(file.StagedPath))
+ {
+ return ar.Files.Select(f => f.Path).ToList();
+ }
+ }
+ if (file.Extension == ".zip")
+ {
+ using (var s = new ZipFile(File.OpenRead(file.StagedPath)))
+ {
+ s.IsStreamOwner = true;
+ s.UseZip64 = UseZip64.On;
+
+ if (s.OfType<ZipEntry>().FirstOrDefault(e => !e.CanDecompress) == null)
+ {
+ return s.OfType<ZipEntry>()
+ .Where(f => f.IsFile)
+ .Select(f => f.Name.Replace('/', '\\'))
+ .ToList();
+ }
+ }
+ }
+
+ using (var e = new ArchiveFile(file.StagedPath))
+ {
+ return e.Entries
+ .Where(f => !f.IsFolder)
+ .Select(f => f.FileName).ToList();
+ }
+
+ }
+
+ /// <summary>
+ /// Given a path that starts with a HASH, return the Virtual file referenced
+ /// </summary>
+ /// <param name="archiveHashPath"></param>
+ /// <returns></returns>
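+ /// <remarks>Illustrative: given new[] { someArchiveHash, "meshes", "armor.nif" }, the most recently modified archive with that hash is chosen and its full path joined with "meshes|armor.nif" (using '|') is looked up.</remarks>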
+ public VirtualFile FileForArchiveHashPath(string[] archiveHashPath)
+ {
+ var archive = HashIndex[archiveHashPath[0]].Where(a => a.IsArchive).OrderByDescending(a => a.LastModified).First();
+ string fullPath = archive.FullPath + "|" + String.Join("|", archiveHashPath.Skip(1));
+ return Lookup(fullPath);
+ }
+ }
+
+ public class StagingGroup : List<VirtualFile>, IDisposable
+ {
+ public StagingGroup(IEnumerable<VirtualFile> files) : base(files)
+ {
+ }
+
+ public void Dispose()
+ {
+ this.Do(f => f.Unstage());
+ }
+
+ internal void Stage()
+ {
+ VirtualFileSystem.VFS.Stage(this);
+ }
+ }
+
+ [JsonObject(MemberSerialization = MemberSerialization.OptIn)]
+ public class VirtualFile
+ {
+ public string[] _paths;
+ [JsonProperty]
+ public string[] Paths
+ {
+ get
+ {
+ return _paths;
+ }
+ set
+ {
+ for (int idx = 0; idx < value.Length; idx += 1)
+ value[idx] = String.Intern(value[idx]);
+ _paths = value;
+ }
+ }
+ [JsonProperty]
+ public string Hash { get; set; }
+ [JsonProperty]
+ public long Size { get; set; }
+ [JsonProperty]
+ public ulong LastModified { get; set; }
+
+ [JsonProperty(NullValueHandling = NullValueHandling.Ignore)]
+ public bool? FinishedIndexing { get; set; }
+
+
+ private string _fullPath;
+
+ public VirtualFile()
+ {
+ }
+
+ private string _stagedPath;
+
+
+ public string FullPath
+ {
+ get
+ {
+ if (_fullPath != null) return _fullPath;
+ _fullPath = String.Join("|", Paths);
+ return _fullPath;
+ }
+ }
+
+ public string Extension
+ {
+ get
+ {
+ return Path.GetExtension(Paths.Last());
+ }
+ }
+
+
+
+
+ /// <summary>
+ /// If this file is in an archive, return the Archive File, otherwise return null.
+ /// </summary>
+ public VirtualFile TopLevelArchive
+ {
+ get
+ {
+ if (Paths.Length == 0) return null;
+ return VirtualFileSystem.VFS[Paths[0]];
+ }
+ }
+
+ public VirtualFile ParentArchive
+ {
+ get
+ {
+ if (ParentPath == null) return null;
+ return VirtualFileSystem.VFS.Lookup(ParentPath);
+ }
+ }
+
+ private bool? _isArchive;
+ public bool IsArchive
+ {
+ get
+ {
+ if (_isArchive == null)
+ _isArchive = FileExtractor.CanExtract(Extension);
+ return (bool)_isArchive;
+ }
+ }
+
+ public bool IsStaged
+ {
+ get
+ {
+ if (IsConcrete) return true;
+ return _stagedPath != null;
+ }
+ }
+
+ public string StagedPath
+ {
+ get
+ {
+ if (!IsStaged)
+ throw new InvalidDataException("File is not staged");
+ if (IsConcrete) return Paths[0];
+ return _stagedPath;
+ }
+ set
+ {
+ if (IsStaged && value != null)
+ throw new InvalidDataException("Can't change the path of a already staged file");
+ _stagedPath = value;
+ }
+ }
+
+ public FileStream OpenRead()
+ {
+ if (!IsStaged)
+ throw new InvalidDataException("File is not staged, cannot open");
+ return File.OpenRead(_stagedPath);
+ }
+
+ /// <summary>
+ /// Calculate the file's SHA, size and last modified
+ /// </summary>
+ internal void Analyze()
+ {
+ if (!IsStaged)
+ throw new InvalidDataException("Cannot analyze an unstaged file");
+
+ var fio = new FileInfo(StagedPath);
+ Size = fio.Length;
+ Hash = Utils.FileSHA256(StagedPath);
+ LastModified = fio.LastWriteTime.ToMilliseconds();
+ }
+
+
+ /// <summary>
+ /// Delete the temporary file associated with this file
+ /// </summary>
+ internal void Unstage()
+ {
+ if (IsStaged && !IsConcrete)
+ {
+ File.Delete(_stagedPath);
+ _stagedPath = null;
+ }
+ }
+
+ internal string GenerateStagedName()
+ {
+ if (_stagedPath != null) return _stagedPath;
+ _stagedPath = Path.Combine(VirtualFileSystem._stagedRoot, Guid.NewGuid().ToString() + Path.GetExtension(Paths.Last()));
+ return _stagedPath;
+ }
+
+ /// <summary>
+ /// Returns true if this file always exists on-disk, and doesn't need to be staged.
+ /// </summary>
+ public bool IsConcrete
+ {
+ get
+ {
+ return Paths.Length == 1;
+ }
+ }
+
+ public bool IsOutdated
+ {
+ get
+ {
+ if (IsStaged)
+ {
+ var fi = new FileInfo(StagedPath);
+ if (fi.LastWriteTime.ToMilliseconds() != LastModified || fi.Length != Size)
+ return true;
+ if (IsArchive)
+ if (!FinishedIndexing ?? true)
+ return true;
+ }
+ return false;
+ }
+
+ }
+
+ private string _parentPath;
+ public string ParentPath
+ {
+ get {
+ if (_parentPath == null && !IsConcrete)
+ _parentPath = String.Join("|", Paths.Take(Paths.Length - 1));
+ return _parentPath;
+ }
+ }
+
+ public IEnumerable<VirtualFile> FileInArchive
+ {
+ get
+ {
+ return VirtualFileSystem.VFS.FilesInArchive(this);
+ }
+ }
+
+ public string[] MakeRelativePaths()
+ {
+ var path_copy = (string[])Paths.Clone();
+ path_copy[0] = VirtualFileSystem.VFS.Lookup(Paths[0]).Hash;
+ return path_copy;
+ }
+
+ public IEnumerable<VirtualFile> FilesInPath
+ {
+ get {
+ return Enumerable.Range(1, Paths.Length)
+ .Select(i => Paths.Take(i))
+ .Select(path => VirtualFileSystem.VFS.Lookup(String.Join("|", path)));
+
+ }
+ }
+ }
+
+
+}
diff --git a/VirtualFileSystem/VirtualFileSystem.csproj b/VirtualFileSystem/VirtualFileSystem.csproj
new file mode 100644
index 00000000..973e28c4
--- /dev/null
+++ b/VirtualFileSystem/VirtualFileSystem.csproj
@@ -0,0 +1,76 @@
+
+
+
+
+ Debug
+ AnyCPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}
+ Library
+ Properties
+ VirtualFileSystem
+ VirtualFileSystem
+ v4.7.2
+ 512
+ true
+
+
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+ x64
+
+
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+
+
+
+ ..\packages\SharpZipLib.1.2.0\lib\net45\ICSharpCode.SharpZipLib.dll
+
+
+ ..\packages\Newtonsoft.Json.12.0.2\lib\net45\Newtonsoft.Json.dll
+
+
+
+ ..\packages\System.Collections.Immutable.1.5.0\lib\netstandard2.0\System.Collections.Immutable.dll
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {ff5d892f-8ff4-44fc-8f7f-cd58f307ad1b}
+ Compression.BSA
+
+
+ {8aa97f58-5044-4bba-b8d9-a74b6947a660}
+ SevenZipExtractor
+
+
+ {b3f3fb6e-b9eb-4f49-9875-d78578bc7ae5}
+ Wabbajack.Common
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/VirtualFileSystem/packages.config b/VirtualFileSystem/packages.config
new file mode 100644
index 00000000..50dc981e
--- /dev/null
+++ b/VirtualFileSystem/packages.config
@@ -0,0 +1,6 @@
+ <?xml version="1.0" encoding="utf-8"?>
+ <packages>
+ <package id="Newtonsoft.Json" version="12.0.2" targetFramework="net472" />
+ <package id="SharpZipLib" version="1.2.0" targetFramework="net472" />
+ <package id="System.Collections.Immutable" version="1.5.0" targetFramework="net472" />
+ </packages>
\ No newline at end of file
diff --git a/Wabbajack.Common/Consts.cs b/Wabbajack.Common/Consts.cs
index 47fcaf62..bf2fc02e 100644
--- a/Wabbajack.Common/Consts.cs
+++ b/Wabbajack.Common/Consts.cs
@@ -30,5 +30,6 @@ namespace Wabbajack.Common
public static string WABBAJACK_INCLUDE = "WABBAJACK_INCLUDE";
public static String AppName = "Wabbajack";
+ public static string HashCacheName = "Wabbajack.hash_cache";
}
}
diff --git a/Wabbajack.Common/FileExtractor.cs b/Wabbajack.Common/FileExtractor.cs
index ea80cdd1..6b3e5469 100644
--- a/Wabbajack.Common/FileExtractor.cs
+++ b/Wabbajack.Common/FileExtractor.cs
@@ -108,11 +108,24 @@ namespace Wabbajack.Common
}
}
+ /// <summary>
+ /// Returns true if the given extension type can be extracted
+ /// </summary>
+ /// <param name="v"></param>
+ /// <returns></returns>
+ public static bool CanExtract(string v)
+ {
+ return Consts.SupportedArchives.Contains(v) || v == ".bsa";
+ }
+ // Probably replace this with VFS?
+ /*
public static void DeepExtract(string file, IEnumerable files, Func fnc, bool leave_open = false, int depth = 1)
{
// Files we need to extract at this level
- var files_for_level = files.Where(f => f.ArchiveHashPath.Length == depth).ToDictionary(e => e.From);
+ var files_for_level = files.Where(f => f.ArchiveHashPath.Length == depth)
+ .GroupBy(e => e.From)
+ .ToDictionary(e => e.Key);
// Archives we need to extract at this level
var archives_for_level = files.Where(f => f.ArchiveHashPath.Length > depth)
.GroupBy(f => f.ArchiveHashPath[depth])
@@ -127,12 +140,21 @@ namespace Wabbajack.Common
if (files_for_level.TryGetValue(e.Name, out var fe))
{
- a = fnc(fe, e);
+ foreach (var inner_fe in fe)
+ {
+ var str = fnc(inner_fe, e);
+ if (str == null) continue;
+ a = new SplittingStream(a, false, fnc(inner_fe, e), leave_open);
+ }
}
if (archives_for_level.TryGetValue(e.Name, out var archive))
{
var name = Path.GetTempFileName() + Path.GetExtension(e.Name);
+ if (disk_archives.ContainsKey(e.Name))
+ {
+
+ }
disk_archives.Add(e.Name, name);
b = File.OpenWrite(name);
}
@@ -148,6 +170,8 @@ namespace Wabbajack.Common
DeepExtract(archive.Value, archives_for_level[archive.Key], fnc, leave_open, depth + 1);
File.Delete(archive.Value);
}
+
}
+ */
}
}
diff --git a/Wabbajack.Common/Utils.cs b/Wabbajack.Common/Utils.cs
index 2e9658a4..d29b9e40 100644
--- a/Wabbajack.Common/Utils.cs
+++ b/Wabbajack.Common/Utils.cs
@@ -1,6 +1,7 @@
using ICSharpCode.SharpZipLib.BZip2;
using IniParser;
using Newtonsoft.Json;
+using Newtonsoft.Json.Bson;
using System;
using System.Collections.Generic;
using System.Diagnostics;
@@ -16,7 +17,28 @@ namespace Wabbajack.Common
{
public static class Utils
{
+ private static Action<string> _loggerFn;
+ private static Action<string, int> _statusFn;
+ public static void SetLoggerFn(Action<string> f)
+ {
+ _loggerFn = f;
+ }
+
+ public static void SetStatusFn(Action<string, int> f)
+ {
+ _statusFn = f;
+ }
+
+ public static void Log(string msg)
+ {
+ _loggerFn?.Invoke(msg);
+ }
+
+ public static void Status(string msg, int progress = 0)
+ {
+ _statusFn?.Invoke(msg, progress);
+ }
///
@@ -89,6 +111,22 @@ namespace Wabbajack.Common
File.WriteAllText(filename, JsonConvert.SerializeObject(obj, Formatting.Indented, new JsonSerializerSettings() {TypeNameHandling = TypeNameHandling.Auto}));
}
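+ // Illustrative round-trip of the BSON helpers below: new List<int> { 1, 2, 3 }.ToBSON("numbers.bson"); var back = "numbers.bson".FromBSON<List<int>>(root_is_array: true);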
+ public static void ToBSON<T>(this T obj, string filename)
+ {
+ using(var fo = File.OpenWrite(filename))
+ using(var br = new BsonDataWriter(fo))
+ {
+ fo.SetLength(0);
+ var serializer = JsonSerializer.Create(new JsonSerializerSettings() { TypeNameHandling = TypeNameHandling.Auto });
+ serializer.Serialize(br, obj);
+ }
+ }
+
+ public static ulong ToMilliseconds(this DateTime date)
+ {
+ return (ulong)(date - new DateTime(1970, 1, 1)).TotalMilliseconds;
+ }
+
public static string ToJSON<T>(this T obj)
{
return JsonConvert.SerializeObject(obj, Formatting.Indented, new JsonSerializerSettings() { TypeNameHandling = TypeNameHandling.Auto });
@@ -99,6 +137,17 @@ namespace Wabbajack.Common
return JsonConvert.DeserializeObject<T>(File.ReadAllText(filename), new JsonSerializerSettings() { TypeNameHandling = TypeNameHandling.Auto });
}
+ public static T FromBSON<T>(this string filename, bool root_is_array = false)
+ {
+ using (var fo = File.OpenRead(filename))
+ using (var br = new BsonDataReader(fo, readRootValueAsArray: root_is_array, DateTimeKind.Local))
+ {
+ var serializer = JsonSerializer.Create(new JsonSerializerSettings() { TypeNameHandling = TypeNameHandling.Auto });
+ return serializer.Deserialize<T>(br);
+ }
+
+ }
+
public static T FromJSONString<T>(this string data)
{
return JsonConvert.DeserializeObject<T>(data, new JsonSerializerSettings() { TypeNameHandling = TypeNameHandling.Auto });
@@ -269,6 +318,24 @@ namespace Wabbajack.Common
File.WriteAllText($"{DateTime.Now.ToString("yyyyMMddTHHmmss_crash_log.txt")}", ExceptionToString(e));
}
+ public static IEnumerable<T> DistinctBy<T, K>(this IEnumerable<T> vs, Func<T, K> select)
+ {
+ HashSet<K> set = new HashSet<K>();
+ foreach (var v in vs) {
+ var key = select(v);
+ if (set.Contains(key)) continue;
+ set.Add(key);
+ yield return v;
+ }
+
+ }
+
+ public static T Last<T>(this T[] a)
+ {
+ if (a == null || a.Length == 0)
+ throw new InvalidDataException("null or empty array");
+ return a[a.Length - 1];
+ }
+
public static V GetOrDefault<K, V>(this IDictionary<K, V> dict, K key)
{
if (dict.TryGetValue(key, out V v)) return v;
diff --git a/Wabbajack.Common/Wabbajack.Common.csproj b/Wabbajack.Common/Wabbajack.Common.csproj
index 5083f1d0..22e41be8 100644
--- a/Wabbajack.Common/Wabbajack.Common.csproj
+++ b/Wabbajack.Common/Wabbajack.Common.csproj
@@ -62,6 +62,9 @@
<HintPath>..\packages\Newtonsoft.Json.12.0.2\lib\net45\Newtonsoft.Json.dll</HintPath>
+ <Reference Include="Newtonsoft.Json.Bson">
+ <HintPath>..\packages\Newtonsoft.Json.Bson.1.0.2\lib\net45\Newtonsoft.Json.Bson.dll</HintPath>
+ </Reference>
@@ -75,7 +78,6 @@
- <Compile Include="Data.cs" />
diff --git a/Wabbajack.Common/packages.config b/Wabbajack.Common/packages.config
index 3f87e68c..1d6931e8 100644
--- a/Wabbajack.Common/packages.config
+++ b/Wabbajack.Common/packages.config
@@ -3,6 +3,7 @@
+ <package id="Newtonsoft.Json.Bson" version="1.0.2" targetFramework="net472" />
\ No newline at end of file
diff --git a/Wabbajack.sln b/Wabbajack.sln
index 68464d5c..c79e480f 100644
--- a/Wabbajack.sln
+++ b/Wabbajack.sln
@@ -20,6 +20,10 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution
RECIPES.md = RECIPES.md
EndProjectSection
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VirtualFileSystem", "VirtualFileSystem\VirtualFileSystem.csproj", "{5128B489-BC28-4F66-9F0B-B4565AF36CBC}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VirtualFileSystem.Test", "VirtualFileSystem.Test\VirtualFileSystem.Test.csproj", "{A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug (no commandargs)|Any CPU = Debug (no commandargs)|Any CPU
@@ -90,6 +94,30 @@ Global
{BA2CFEA1-072B-42D6-822A-8C6D0E3AE5D9}.Release|Any CPU.Build.0 = Release|Any CPU
{BA2CFEA1-072B-42D6-822A-8C6D0E3AE5D9}.Release|x64.ActiveCfg = Release|x64
{BA2CFEA1-072B-42D6-822A-8C6D0E3AE5D9}.Release|x64.Build.0 = Release|x64
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug (no commandargs)|Any CPU.ActiveCfg = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug (no commandargs)|Any CPU.Build.0 = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug (no commandargs)|x64.ActiveCfg = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug (no commandargs)|x64.Build.0 = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Debug|x64.Build.0 = Debug|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Release|Any CPU.Build.0 = Release|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Release|x64.ActiveCfg = Release|Any CPU
+ {5128B489-BC28-4F66-9F0B-B4565AF36CBC}.Release|x64.Build.0 = Release|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug (no commandargs)|Any CPU.ActiveCfg = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug (no commandargs)|Any CPU.Build.0 = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug (no commandargs)|x64.ActiveCfg = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug (no commandargs)|x64.Build.0 = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Debug|x64.Build.0 = Debug|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Release|Any CPU.Build.0 = Release|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Release|x64.ActiveCfg = Release|Any CPU
+ {A2913DFE-18FF-468B-A6C1-55F7C0CC0CE8}.Release|x64.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
diff --git a/Wabbajack/AppState.cs b/Wabbajack/AppState.cs
index 76f9a743..347a8519 100644
--- a/Wabbajack/AppState.cs
+++ b/Wabbajack/AppState.cs
@@ -130,6 +130,7 @@ namespace Wabbajack
public AppState(Dispatcher d, String mode)
{
+ _startTime = DateTime.Now;
ArchiveFile.SetupLibrary();
LogFile = Assembly.GetExecutingAssembly().Location + ".log";
@@ -186,6 +187,7 @@ namespace Wabbajack
public void LogMsg(string msg)
{
+ msg = $"{(DateTime.Now - _startTime).TotalSeconds:0.##} - {msg}";
dispatcher.Invoke(() => Log.Add(msg));
lock (dispatcher) {
File.AppendAllText(LogFile, msg + "\r\n");
@@ -264,6 +266,8 @@ namespace Wabbajack
}
private ICommand _begin;
+ private DateTime _startTime;
+
public ICommand Begin
{
get
@@ -312,7 +316,6 @@ namespace Wabbajack
{
try
{
- compiler.LoadArchives();
compiler.Compile();
}
catch (Exception ex)
diff --git a/Wabbajack/Compiler.cs b/Wabbajack/Compiler.cs
index 13ac5736..64ecc962 100644
--- a/Wabbajack/Compiler.cs
+++ b/Wabbajack/Compiler.cs
@@ -1,6 +1,5 @@
using Compression.BSA;
using Newtonsoft.Json;
-using SharpCompress.Archives;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
@@ -10,10 +9,10 @@ using System.Reflection;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
-using System.Threading.Tasks;
using System.Web;
using Wabbajack.Common;
using static Wabbajack.NexusAPI;
+using VFS;
namespace Wabbajack
{
@@ -59,9 +58,26 @@ namespace Wabbajack
public ConcurrentBag<Directive> ExtraFiles { get; private set; }
public Dictionary<string, dynamic> ModInis { get; private set; }
- public List<IndexedArchive> IndexedArchives;
+ public VirtualFileSystem VFS
+ {
+ get
+ {
+ return VirtualFileSystem.VFS;
+ }
- public List<IndexedArchiveEntry> IndexedFiles { get; private set; }
+ }
+
+ public List<IndexedArchive> IndexedArchives { get; private set; }
+ public Dictionary<string, IEnumerable<VirtualFile>> IndexedFiles { get; private set; }
+
+ public class IndexedFileMatch
+ {
+ public IndexedArchive Archive;
+ public IndexedArchiveEntry Entry;
+ public DateTime LastModified;
+ }
+
+ public Dictionary> DirectMatchIndex;
public void Info(string msg, params object[] args)
{
@@ -94,85 +110,21 @@ namespace Wabbajack
GamePath = ((string)MO2Ini.General.gamePath).Replace("\\\\", "\\");
}
-
-
- public void LoadArchives()
- {
- IndexedArchives = Directory.EnumerateFiles(MO2DownloadsFolder)
- .Where(file => Consts.SupportedArchives.Contains(Path.GetExtension(file)))
- .PMap(file => LoadArchive(file));
- IndexedFiles = FlattenFiles(IndexedArchives);
- Info($"Found {IndexedFiles.Count} files in archives");
- }
-
- private List FlattenFiles(IEnumerable archives)
- {
- return archives.PMap(e => FlattenArchiveEntries(e, null, new string[0]))
- .SelectMany(e => e)
- .ToList();
- }
-
- private IEnumerable FlattenArchiveEntries(IndexedArchiveCache archive, string name, string[] path)
- {
- var new_path = new string[path.Length + 1];
- Array.Copy(path, 0, new_path, 0, path.Length);
- new_path[path.Length] = path.Length == 0 ? archive.Hash : name;
-
- foreach (var e in archive.Entries)
- {
- yield return new IndexedArchiveEntry()
- {
- Path = e.Path,
- Size = e.Size,
- Hash = e.Hash,
- HashPath = new_path
- };
- }
- if (archive.InnerArchives != null) {
- foreach (var inner in archive.InnerArchives)
- {
- foreach (var entry in FlattenArchiveEntries(inner.Value, inner.Key, new_path))
- {
- yield return entry;
- }
- }
- }
-
- }
-
-
- private const int ARCHIVE_CONTENTS_VERSION = 1;
private IndexedArchive LoadArchive(string file)
{
- TOP:
- string metaname = file + ".archive_contents";
+ var info = new IndexedArchive();
+ info.File = VFS.Lookup(file);
- if (metaname.FileExists() && new FileInfo(metaname).LastWriteTime >= new FileInfo(file).LastWriteTime)
+ info.Name = Path.GetFileName(file);
+
+ var ini_name = file + ".meta";
+ if (ini_name.FileExists())
{
- Status("Loading Archive Index for {0}", Path.GetFileName(file));
- var info = metaname.FromJSON<IndexedArchive>();
- if (info.Version != ARCHIVE_CONTENTS_VERSION)
- {
- File.Delete(metaname);
- goto TOP;
- }
-
- info.Name = Path.GetFileName(file);
- info.AbsolutePath = file;
-
-
- var ini_name = file + ".meta";
- if (ini_name.FileExists())
- {
- info.IniData = ini_name.LoadIniFile();
- info.Meta = File.ReadAllText(ini_name);
- }
-
- return info;
+ info.IniData = ini_name.LoadIniFile();
+ info.Meta = File.ReadAllText(ini_name);
}
- IndexArchive(file).ToJSON(metaname);
- goto TOP;
+ return info;
}
private bool IsArchiveFile(string name)
@@ -183,70 +135,44 @@ namespace Wabbajack
return false;
}
- private IndexedArchiveCache IndexArchive(string file)
- {
- Status("Indexing {0}", Path.GetFileName(file));
- var streams = new Dictionary<string, (SHA256Managed, long)>();
- var inner_archives = new Dictionary<string, string>();
- FileExtractor.Extract(file, entry =>
- {
- Stream inner;
- if (IsArchiveFile(entry.Name))
- {
- var name = Path.GetTempFileName() + Path.GetExtension(entry.Name);
- inner_archives.Add(entry.Name, name);
- inner = File.OpenWrite(name);
- }
- else
- {
- inner = Stream.Null;
- }
- var sha = new SHA256Managed();
- var os = new CryptoStream(inner, sha, CryptoStreamMode.Write);
- streams.Add(entry.Name, (sha, (long)entry.Size));
- return os;
- });
-
- var indexed = new IndexedArchiveCache();
- indexed.Version = ARCHIVE_CONTENTS_VERSION;
- indexed.Hash = file.FileSHA256();
- indexed.Entries = streams.Select(entry =>
- {
- return new IndexedEntry()
- {
- Hash = entry.Value.Item1.Hash.ToBase64(),
- Size = (long)entry.Value.Item2,
- Path = entry.Key
- };
- }).ToList();
-
- streams.Do(e => e.Value.Item1.Dispose());
-
- if (inner_archives.Count > 0)
- {
- var result = inner_archives.Select(archive =>
- {
- return (archive.Key, IndexArchive(archive.Value));
- }).ToDictionary(e => e.Key, e => e.Item2);
- indexed.InnerArchives = result;
-
- inner_archives.Do(e => File.Delete(e.Value));
- }
-
- return indexed;
- }
-
public void Compile()
{
+ Info($"Indexing {MO2Folder}");
+ VFS.AddRoot(MO2Folder);
+ Info($"Indexing {GamePath}");
+ VFS.AddRoot(GamePath);
+
var mo2_files = Directory.EnumerateFiles(MO2Folder, "*", SearchOption.AllDirectories)
.Where(p => p.FileExists())
- .Select(p => new RawSourceFile() { Path = p.RelativeTo(MO2Folder), AbsolutePath = p });
+ .Select(p => new RawSourceFile(VFS.Lookup(p)) { Path = p.RelativeTo(MO2Folder)});
var game_files = Directory.EnumerateFiles(GamePath, "*", SearchOption.AllDirectories)
.Where(p => p.FileExists())
- .Select(p => new RawSourceFile() { Path = Path.Combine(Consts.GameFolderFilesDir, p.RelativeTo(GamePath)), AbsolutePath = p });
+ .Select(p => new RawSourceFile(VFS.Lookup(p)) { Path = Path.Combine(Consts.GameFolderFilesDir, p.RelativeTo(GamePath))});
+
+
+ Info($"Indexing Archives");
+ IndexedArchives = Directory.EnumerateFiles(MO2DownloadsFolder)
+ .Where(f => Consts.SupportedArchives.Contains(Path.GetExtension(f)))
+ .Where(f => File.Exists(f + ".meta"))
+ .Select(f => new IndexedArchive()
+ {
+ File = VFS.Lookup(f),
+ IniData = (f + ".meta").LoadIniFile(),
+ Meta = File.ReadAllText(f + ".meta")
+ })
+ .ToList();
+
+ Info($"Indexing Files");
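+ // Map every content hash to the VFS entries that carry it, with entries from the most recently modified top-level archive first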
+ IndexedFiles = IndexedArchives.PMap(f => { Status($"Finding files in {Path.GetFileName(f.File.FullPath)}");
+ return VFS.FilesInArchive(f.File); })
+ .SelectMany(fs => fs)
+ .OrderByDescending(f => f.TopLevelArchive.LastModified)
+ .GroupBy(f => f.Hash)
+ .ToDictionary(f => f.Key, f => f.AsEnumerable());
Info("Searching for mod files");
+
AllFiles = mo2_files.Concat(game_files).ToList();
Info("Found {0} files to build into mod list", AllFiles.Count);
@@ -326,7 +252,6 @@ namespace Wabbajack
private void ResetMembers()
{
AllFiles = null;
- IndexedArchives = null;
InstallDirectives = null;
SelectedArchives = null;
ExtraFiles = null;
@@ -355,40 +280,25 @@ namespace Wabbajack
private void BuildArchivePatches(string archive_sha, IEnumerable<PatchedFromArchive> group, Dictionary<string, string> absolute_paths)
{
- var archive = IndexedArchives.First(a => a.Hash == archive_sha);
- var paths = group.Select(g => g.FullPath).ToHashSet();
- var streams = new Dictionary<string, MemoryStream>();
- Status($"Extracting {paths.Count} patch files from {archive.Name}");
- // First we fetch the source files from the input archive
-
- FileExtractor.DeepExtract(archive.AbsolutePath, group, (fe, entry) =>
+ var archive = VFS.HashIndex[archive_sha];
+ using (var files = VFS.StageWith(group.Select(g => VFS.FileForArchiveHashPath(g.ArchiveHashPath))))
{
- if (!paths.Contains(fe.FullPath)) return null;
-
- var result = new MemoryStream();
- streams.Add(fe.FullPath, result);
- return result;
-
- }, false);
-
- var extracted = streams.ToDictionary(k => k.Key, v => v.Value.ToArray());
- // Now Create the patches
- Status("Building Patches for {0}", archive.Name);
- Info("Building Patches for {0}", archive.Name);
- group.PMap(entry =>
- {
- Info("Patching {0}", entry.To);
- var ss = extracted[entry.FullPath];
- using (var origin = new MemoryStream(ss))
- using (var output = new MemoryStream())
+ var by_path = files.GroupBy(f => string.Join("|", f.Paths.Skip(1))).ToDictionary(f => f.Key, f => f.First());
+ // Now Create the patches
+ group.PMap(entry =>
{
- var a = origin.ReadAll();
- var b = LoadDataForTo(entry.To, absolute_paths);
- BSDiff.Create(a, b, output);
- entry.Patch = output.ToArray().ToBase64();
- Info($"Patch size {entry.Patch.Length} for {entry.To}");
- }
- });
+ Info("Patching {0}", entry.To);
+ using (var origin = by_path[string.Join("|", entry.ArchiveHashPath.Skip(1))].OpenRead())
+ using (var output = new MemoryStream())
+ {
+ var a = origin.ReadAll();
+ var b = LoadDataForTo(entry.To, absolute_paths);
+ BSDiff.Create(a, b, output);
+ entry.Patch = output.ToArray().ToBase64();
+ Info($"Patch size {entry.Patch.Length} for {entry.To}");
+ }
+ });
+ }
}
@@ -416,17 +326,20 @@ namespace Wabbajack
private void GatherArchives()
{
Info($"Building a list of archives based on the files required");
- var archives = IndexedArchives.GroupBy(a => a.Hash).ToDictionary(k => k.Key, k => k.First());
var shas = InstallDirectives.OfType<FromArchive>()
.Select(a => a.ArchiveHashPath[0])
.Distinct();
+ var archives = IndexedArchives.OrderByDescending(f => f.File.LastModified)
+ .GroupBy(f => f.File.Hash)
+ .ToDictionary(f => f.Key, f => f.First());
+
SelectedArchives = shas.PMap(sha => ResolveArchive(sha, archives));
}
- private Archive ResolveArchive(string sha, Dictionary<string, IndexedArchive> archives)
+ private Archive ResolveArchive(string sha, IDictionary<string, IndexedArchive> archives)
{
if (archives.TryGetValue(sha, out var found))
{
@@ -520,6 +433,13 @@ namespace Wabbajack
}
result = tmp;
}
+ else if (general.manualURL != null)
+ {
+ result = new ManualURLArchive()
+ {
+ URL = general.manualURL.ToString()
+ };
+ }
else
{
Error("No way to handle archive {0} but it's required by the modpack", found.Name);
@@ -527,7 +447,7 @@ namespace Wabbajack
}
result.Name = found.Name;
- result.Hash = found.Hash;
+ result.Hash = found.File.Hash;
result.Meta = found.Meta;
return result;
@@ -540,10 +460,12 @@ namespace Wabbajack
private Directive RunStack(IEnumerable<Func<RawSourceFile, Directive>> stack, RawSourceFile source)
{
Status("Compiling {0}", source.Path);
- return (from f in stack
- let result = f(source)
- where result != null
- select result).First();
+ foreach (var f in stack)
+ {
+ var result = f(source);
+ if (result != null) return result;
+ }
+ throw new InvalidDataException("Data fell out of the compilation stack");
}
@@ -562,6 +484,7 @@ namespace Wabbajack
IgnoreStartsWith("downloads\\"),
IgnoreStartsWith("webcache\\"),
IgnoreStartsWith("overwrite\\"),
+ IgnorePathContains("temporary_logs"),
IgnoreEndsWith(".pyc"),
IgnoreEndsWith(".log"),
IgnoreOtherProfiles(),
@@ -575,10 +498,10 @@ namespace Wabbajack
IgnoreRegex(Consts.GameFolderFilesDir + "\\\\.*\\.bsa"),
IncludeModIniData(),
DirectMatch(),
- IncludeTaggedFiles(),
DeconstructBSAs(), // Deconstruct BSAs before building patches so we don't generate massive patch files
IncludePatches(),
IncludeDummyESPs(),
+ IncludeTaggedFiles(),
// If we have no match at this point for a game folder file, skip them, we can't do anything about them
@@ -597,6 +520,22 @@ namespace Wabbajack
};
}
+ private Func<RawSourceFile, Directive> IgnorePathContains(string v)
+ {
+ v = $"\\{v.Trim('\\')}\\";
+ var reason = $"Ignored because path contains {v}";
+ return source =>
+ {
+ if (source.Path.Contains(v))
+ {
+ var result = source.EvolveTo<IgnoredDirectly>();
+ result.Reason = reason;
+ return result;
+ }
+ return null;
+ };
+ }
+
///
/// If a user includes WABBAJACK_INCLUDE directly in the notes or comments of a mod, the contents of that
@@ -672,6 +611,15 @@ namespace Wabbajack
///
private Func<RawSourceFile, Directive> DeconstructBSAs()
{
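+ // Mods whose meta notes or comments contain WABBAJACK_INCLUDE get their BSA contents inlined via IncludeALL instead of only matched or patched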
+ var include_directly = ModInis.Where(kv => {
+ var general = kv.Value.General;
+ if (general.notes != null && general.notes.Contains(Consts.WABBAJACK_INCLUDE))
+ return true;
+ if (general.comments != null && general.comments.Contains(Consts.WABBAJACK_INCLUDE))
+ return true;
+ return false;
+ }).Select(kv => $"mods\\{kv.Key}\\");
+
var microstack = new List<Func<RawSourceFile, Directive>>()
{
DirectMatch(),
@@ -679,20 +627,36 @@ namespace Wabbajack
DropAll()
};
+ var microstack_with_include = new List<Func<RawSourceFile, Directive>>()
+ {
+ DirectMatch(),
+ IncludePatches(),
+ IncludeALL()
+ };
+
+
return source =>
{
if (!Consts.SupportedBSAs.Contains(Path.GetExtension(source.Path))) return null;
- var hashed = HashBSA(source.AbsolutePath);
+ bool default_include = false;
+ if (source.Path.StartsWith("mods"))
+ {
+ foreach (var modpath in include_directly)
+ {
+ if (source.Path.StartsWith(modpath))
+ {
+ default_include = true;
+ break;
+ }
+ }
+ }
- var source_files = hashed.Select(e => new RawSourceFile() {
- Hash = e.Item2,
- Path = e.Item1,
- AbsolutePath = e.Item1
- });
+ var source_files = source.File.FileInArchive;
+ var stack = default_include ? microstack_with_include : microstack;
- var matches = source_files.Select(e => RunStack(microstack, e));
+ var matches = source_files.PMap(e => RunStack(stack, new RawSourceFile(e)));
var id = Guid.NewGuid().ToString();
@@ -724,26 +688,14 @@ namespace Wabbajack
};
}
- /// <summary>
- /// Given a BSA on disk, index it and return a dictionary of SHA256 -> filename
- /// </summary>
- /// <param name="absolutePath"></param>
- /// <returns></returns>
- private List<(string, string)> HashBSA(string absolutePath)
+ private Func<RawSourceFile, Directive> IncludeALL()
{
- Status($"Hashing BSA: {absolutePath}");
- var results = new List<(string, string)>();
- using (var a = new BSAReader(absolutePath))
+ return source =>
{
- a.Files.PMap(entry =>
- {
- Status($"Hashing BSA: {absolutePath} - {entry.Path}");
-
- var data = entry.GetData();
- results.Add((entry.Path, data.SHA256()));
- });
- }
- return results;
+ var inline = source.EvolveTo<InlineFile>();
+ inline.SourceData = File.ReadAllBytes(source.AbsolutePath).ToBase64();
+ return inline;
+ };
}
private Func<RawSourceFile, Directive> IgnoreDisabledMods()
@@ -766,23 +718,20 @@ namespace Wabbajack
private Func<RawSourceFile, Directive> IncludePatches()
{
- var archive_shas = IndexedArchives.GroupBy(e => e.Hash)
- .ToDictionary(e => e.Key);
- var indexed = (from entry in IndexedFiles
- select new { archive = archive_shas[entry.HashPath[0]].First(),
- entry = entry })
- .GroupBy(e => Path.GetFileName(e.entry.Path).ToLower())
- .ToDictionary(e => e.Key);
+ var indexed = IndexedFiles.Values
+ .SelectMany(f => f)
+ .GroupBy(f => Path.GetFileName(f.Paths.Last()).ToLower())
+ .ToDictionary(f => f.Key);
return source =>
{
if (indexed.TryGetValue(Path.GetFileName(source.Path.ToLower()), out var value))
{
+ // TODO: Improve this
var found = value.First();
var e = source.EvolveTo<PatchedFromArchive>();
- e.From = found.entry.Path;
- e.ArchiveHashPath = found.entry.HashPath;
+ e.ArchiveHashPath = found.MakeRelativePaths();
e.To = source.Path;
return e;
}
@@ -907,33 +856,21 @@ namespace Wabbajack
private Func<RawSourceFile, Directive> DirectMatch()
{
- var archive_shas = IndexedArchives.GroupBy(e => e.Hash)
- .ToDictionary(e => e.Key);
-
- var indexed = (from entry in IndexedFiles
- select new { archive = archive_shas[entry.HashPath[0]].First(),
- entry = entry })
- .GroupBy(e => e.entry.Hash)
- .ToDictionary(e => e.Key);
-
-
return source =>
{
- if (indexed.TryGetValue(source.Hash, out var found))
+ if (IndexedFiles.TryGetValue(source.Hash, out var found))
{
var result = source.EvolveTo<FromArchive>();
- var match = found.Where(f => Path.GetFileName(f.entry.Path) == Path.GetFileName(source.Path))
- .OrderByDescending(f => new FileInfo(f.archive.AbsolutePath).LastWriteTime)
+ var match = found.Where(f => Path.GetFileName(f.Paths[0]) == Path.GetFileName(source.Path))
.FirstOrDefault();
if (match == null)
- match = found.OrderByDescending(f => new FileInfo(f.archive.AbsolutePath).LastWriteTime)
- .FirstOrDefault();
+ match = found.FirstOrDefault();
+
+ result.ArchiveHashPath = match.MakeRelativePaths();
- result.ArchiveHashPath = match.entry.HashPath;
- result.From = match.entry.Path;
return result;
}
return null;
diff --git a/Wabbajack.Common/Data.cs b/Wabbajack/Data.cs
similarity index 83%
rename from Wabbajack.Common/Data.cs
rename to Wabbajack/Data.cs
index 0548d128..087d8bf5 100644
--- a/Wabbajack.Common/Data.cs
+++ b/Wabbajack/Data.cs
@@ -4,27 +4,36 @@ using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
+using VFS;
-namespace Wabbajack.Common
+namespace Wabbajack
{
- public class RawSourceFile
+ public class RawSourceFile
{
- public string AbsolutePath;
+ public RawSourceFile(VirtualFile file)
+ {
+ File = file;
+ }
+
+ public string AbsolutePath
+ {
+ get
+ {
+ return File.StagedPath;
+ }
+ }
+
public string Path;
- private string _hash;
+ public VirtualFile File { get; private set; }
+
public string Hash
{
get
{
- if (_hash != null) return _hash;
- _hash = AbsolutePath.FileSHA256();
- return _hash;
- }
- set
- {
- _hash = value;
+ return File.Hash;
}
+
}
public T EvolveTo() where T : Directive, new()
@@ -93,10 +102,9 @@ namespace Wabbajack.Common
/// MurMur3 hash of the archive this file comes from
///
public string[] ArchiveHashPath;
- /// <summary>
- /// The relative path of the file in the archive
- /// </summary>
- public string From;
+
+ [JsonIgnore]
+ public VirtualFile FromFile;
private string _fullPath = null;
[JsonIgnore]
@@ -105,9 +113,7 @@ namespace Wabbajack.Common
get
{
if (_fullPath == null) {
- var path = ArchiveHashPath.ToList();
- path.Add(From);
- _fullPath = String.Join("|", path);
+ _fullPath = String.Join("|", ArchiveHashPath);
}
return _fullPath;
}
@@ -174,6 +180,14 @@ namespace Wabbajack.Common
public List<string> Headers;
}
+ /// <summary>
+ /// A URL that cannot be downloaded automatically and has to be downloaded by hand
+ /// </summary>
+ public class ManualURLArchive : Archive
+ {
+ public string URL;
+ }
+
/// <summary>
/// An archive that requires additional HTTP headers.
/// </summary>
@@ -203,26 +217,12 @@ namespace Wabbajack.Common
{
}
- /// <summary>
- /// The indexed contents of an archive
- /// </summary>
- public class IndexedArchiveCache
- {
- public string Hash;
- public int Version;
- public List<IndexedEntry> Entries;
-
- [JsonProperty(NullValueHandling = NullValueHandling.Ignore)]
- public Dictionary<string, IndexedArchiveCache> InnerArchives;
- }
-
- public class IndexedArchive : IndexedArchiveCache
+ public class IndexedArchive
{
public dynamic IniData;
public string Name;
public string Meta;
- public string AbsolutePath;
- public List<string> HashPath;
+ public VirtualFile File { get; internal set; }
}
///
diff --git a/Wabbajack/Installer.cs b/Wabbajack/Installer.cs
index 5378f02a..73952e8a 100644
--- a/Wabbajack/Installer.cs
+++ b/Wabbajack/Installer.cs
@@ -9,12 +9,21 @@ using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
+using VFS;
using Wabbajack.Common;
namespace Wabbajack
{
public class Installer
{
+ public VirtualFileSystem VFS
+ {
+ get
+ {
+ return VirtualFileSystem.VFS;
+ }
+ }
+
public Installer(ModList mod_list, string output_folder, Action<string> log_fn)
{
Outputfolder = output_folder;
@@ -88,6 +97,9 @@ namespace Wabbajack
Error("Cannot continue, was unable to download one or more archives");
}
}
+
+ PrimeVFS();
+
BuildFolderStructure();
InstallArchives();
InstallIncludedFiles();
@@ -96,6 +108,37 @@ namespace Wabbajack
Info("Installation complete! You may exit the program.");
}
+
+ /// <summary>
+ /// We don't want to make the installer index all the archives, that's just a waste of time, so instead
+ /// we'll pass just enough information to VFS to let it know about the files we have.
+ /// </summary>
+ private void PrimeVFS()
+ {
+ HashedArchives.Do(a => VFS.AddKnown(new VirtualFile()
+ {
+ Paths = new string[] { a.Value },
+ Hash = a.Key
+ }));
+ VFS.RefreshIndexes();
+
+
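+ // For each FromArchive directive, register its inner path with the VFS, swapping the leading hash for the full path of a concrete archive with that hash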
+ ModList.Directives
+ .OfType()
+ .Do(f =>
+ {
+ var updated_path = new string[f.ArchiveHashPath.Length];
+ f.ArchiveHashPath.CopyTo(updated_path, 0);
+ updated_path[0] = VFS.HashIndex[updated_path[0]].Where(e => e.IsConcrete).First().FullPath;
+ VFS.AddKnown(new VirtualFile() { Paths = updated_path });
+ });
+
+ VFS.BackfillMissing();
+
+
+
+ }
+
private void BuildBSAs()
{
var bsas = ModList.Directives.OfType().ToList();
@@ -131,8 +174,11 @@ namespace Wabbajack
}
});
- Info($"Removing temp folder {Consts.BSACreationDir}");
- Directory.Delete(Path.Combine(Outputfolder, Consts.BSACreationDir), true);
+ if (Directory.Exists(Consts.BSACreationDir))
+ {
+ Info($"Removing temp folder {Consts.BSACreationDir}");
+ Directory.Delete(Path.Combine(Outputfolder, Consts.BSACreationDir), true);
+ }
}
@@ -187,18 +233,18 @@ namespace Wabbajack
var files = grouping.GroupBy(e => e.FullPath)
.ToDictionary(e => e.Key);
- FileExtractor.DeepExtract(absolutePath, files.Select(f => f.Value.First()),
- (fe, entry) =>
+
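+ // Resolve each group of directives to its VFS entry, temporarily point the entry's staged path at the directive's destination, then let the VFS stage (extract) them all in one pass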
+ var vfiles = files.Select(g =>
{
- if (files.TryGetValue(fe.FullPath, out var directives))
- {
- var directive = directives.First();
- var absolute = Path.Combine(Outputfolder, directive.To);
- if (absolute.FileExists()) File.Delete(absolute);
- return File.OpenWrite(absolute);
- }
- return null;
- });
+ var first_file = g.Value.First();
+ var file = VFS.FileForArchiveHashPath(first_file.ArchiveHashPath);
+ file.StagedPath = first_file.To;
+ return file;
+ }).ToList();
+
+ VFS.Stage(vfiles);
+
+ vfiles.Do(f => f.StagedPath = null);
Status("Copying duplicated files for {0}", archive.Name);
diff --git a/Wabbajack/MainWindow.xaml.cs b/Wabbajack/MainWindow.xaml.cs
index d4ac27a6..566d91f0 100644
--- a/Wabbajack/MainWindow.xaml.cs
+++ b/Wabbajack/MainWindow.xaml.cs
@@ -48,6 +48,9 @@ namespace Wabbajack
WorkQueue.Init((id, msg, progress) => context.SetProgress(id, msg, progress),
(max, current) => context.SetQueueSize(max, current));
+ Utils.SetLoggerFn(s => context.LogMsg(s));
+ Utils.SetStatusFn((msg, progress) => WorkQueue.Report(msg, progress));
+
if (DebugMode)
@@ -60,7 +63,6 @@ namespace Wabbajack
context.ModListName = compiler.MO2Profile;
context.Mode = "Building";
- compiler.LoadArchives();
compiler.Compile();
var modlist = compiler.ModList.ToJSON();
diff --git a/Wabbajack/Wabbajack.csproj b/Wabbajack/Wabbajack.csproj
index 945b7318..00229220 100644
--- a/Wabbajack/Wabbajack.csproj
+++ b/Wabbajack/Wabbajack.csproj
@@ -139,6 +139,7 @@
MSBuild:Compile
Designer
+ <Compile Include="Data.cs" />
@@ -200,6 +201,10 @@
{8aa97f58-5044-4bba-b8d9-a74b6947a660}
SevenZipExtractor
+ <ProjectReference Include="..\VirtualFileSystem\VirtualFileSystem.csproj">
+ <Project>{5128b489-bc28-4f66-9f0b-b4565af36cbc}</Project>
+ <Name>VirtualFileSystem</Name>
+ </ProjectReference>
{b3f3fb6e-b9eb-4f49-9875-d78578bc7ae5}
Wabbajack.Common