2019-11-16 13:22:40 +00:00
|
|
|
|
using System;
|
|
|
|
|
using System.Collections.Generic;
|
|
|
|
|
using System.IO;
|
|
|
|
|
using System.IO.Compression;
|
|
|
|
|
using System.Linq;
|
2019-11-20 00:15:46 +00:00
|
|
|
|
using Alphaleonis.Win32.Filesystem;
|
2019-11-16 13:22:40 +00:00
|
|
|
|
using Wabbajack.Common;
|
|
|
|
|
using Wabbajack.Lib.Downloaders;
|
|
|
|
|
using Wabbajack.VirtualFileSystem;
|
|
|
|
|
using Directory = Alphaleonis.Win32.Filesystem.Directory;
|
2019-11-20 00:15:46 +00:00
|
|
|
|
using File = System.IO.File;
|
|
|
|
|
using FileInfo = System.IO.FileInfo;
|
2019-11-16 13:22:40 +00:00
|
|
|
|
using Path = Alphaleonis.Win32.Filesystem.Path;
|
|
|
|
|
|
|
|
|
|
namespace Wabbajack.Lib
|
|
|
|
|
{
|
2019-11-17 23:48:32 +00:00
|
|
|
|
public abstract class AInstaller : ABatchProcessor
|
2019-11-16 13:22:40 +00:00
|
|
|
|
{
|
|
|
|
|
        // If true, installation continues even when some archives listed in the
        // modlist could not be found or downloaded.
        public bool IgnoreMissingFiles { get; internal set; } = false;

        // Folder the modlist will be installed into.
        public string OutputFolder { get; set; }

        // Folder where downloaded archives live; scanned/indexed by HashArchives().
        public string DownloadFolder { get; set; }

        // Which mod manager this installer targets.
        public ModManager ModManager;

        // Path to the .wabbajack archive this modlist was loaded from; also read by
        // LoadBytesFromPath to pull embedded resources (e.g. binary patches).
        public string ModListArchive { get; internal set; }

        // The deserialized modlist being installed.
        public ModList ModList { get; internal set; }

        // Map of archive hash -> absolute path on disk, built by HashArchives().
        public Dictionary<string, string> HashedArchives { get; set; }
|
|
|
|
|
|
|
|
|
|
public void Info(string msg)
|
|
|
|
|
{
|
|
|
|
|
Utils.Log(msg);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public void Status(string msg)
|
|
|
|
|
{
|
2019-11-17 14:06:28 +00:00
|
|
|
|
Queue.Report(msg, 0);
|
2019-11-16 13:22:40 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
        /// <summary>
        /// Logs <paramref name="msg"/> and then aborts the current operation by throwing.
        /// </summary>
        /// <param name="msg">Error text; also becomes the exception message.</param>
        /// <exception cref="Exception">Always thrown after the message is logged.</exception>
        public void Error(string msg)
        {
            // Log first so the message is captured even though we are about to throw.
            Utils.Log(msg);
            throw new Exception(msg);
        }
|
|
|
|
|
|
|
|
|
|
public byte[] LoadBytesFromPath(string path)
|
|
|
|
|
{
|
|
|
|
|
using (var fs = new FileStream(ModListArchive, FileMode.Open, FileAccess.Read, FileShare.Read))
|
|
|
|
|
using (var ar = new ZipArchive(fs, ZipArchiveMode.Read))
|
|
|
|
|
using (var ms = new MemoryStream())
|
|
|
|
|
{
|
|
|
|
|
var entry = ar.GetEntry(path);
|
|
|
|
|
using (var e = entry.Open())
|
|
|
|
|
e.CopyTo(ms);
|
|
|
|
|
return ms.ToArray();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public static ModList LoadFromFile(string path)
|
|
|
|
|
{
|
|
|
|
|
using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read))
|
|
|
|
|
using (var ar = new ZipArchive(fs, ZipArchiveMode.Read))
|
|
|
|
|
{
|
|
|
|
|
var entry = ar.GetEntry("modlist");
|
|
|
|
|
if (entry == null)
|
|
|
|
|
{
|
|
|
|
|
entry = ar.GetEntry("modlist.json");
|
|
|
|
|
using (var e = entry.Open())
|
|
|
|
|
return e.FromJSON<ModList>();
|
|
|
|
|
}
|
|
|
|
|
using (var e = entry.Open())
|
|
|
|
|
return e.FromCERAS<ModList>(ref CerasConfig.Config);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// <summary>
|
|
|
|
|
/// We don't want to make the installer index all the archives, that's just a waste of time, so instead
|
|
|
|
|
/// we'll pass just enough information to VFS to let it know about the files we have.
|
|
|
|
|
/// </summary>
|
|
|
|
|
public void PrimeVFS()
|
|
|
|
|
{
|
|
|
|
|
VFS.AddKnown(HashedArchives.Select(a => new KnownFile
|
|
|
|
|
{
|
|
|
|
|
Paths = new[] { a.Value },
|
|
|
|
|
Hash = a.Key
|
|
|
|
|
}));
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
VFS.AddKnown(
|
|
|
|
|
ModList.Directives
|
|
|
|
|
.OfType<FromArchive>()
|
|
|
|
|
.Select(f => new KnownFile { Paths = f.ArchiveHashPath}));
|
|
|
|
|
|
|
|
|
|
VFS.BackfillMissing();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public void BuildFolderStructure()
|
|
|
|
|
{
|
|
|
|
|
Info("Building Folder Structure");
|
|
|
|
|
ModList.Directives
|
|
|
|
|
.Select(d => Path.Combine(OutputFolder, Path.GetDirectoryName(d.To)))
|
|
|
|
|
.ToHashSet()
|
|
|
|
|
.Do(f =>
|
|
|
|
|
{
|
|
|
|
|
if (Directory.Exists(f)) return;
|
|
|
|
|
Directory.CreateDirectory(f);
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public void InstallArchives()
|
|
|
|
|
{
|
|
|
|
|
Info("Installing Archives");
|
|
|
|
|
Info("Grouping Install Files");
|
|
|
|
|
var grouped = ModList.Directives
|
|
|
|
|
.OfType<FromArchive>()
|
|
|
|
|
.GroupBy(e => e.ArchiveHashPath[0])
|
|
|
|
|
.ToDictionary(k => k.Key);
|
|
|
|
|
var archives = ModList.Archives
|
|
|
|
|
.Select(a => new { Archive = a, AbsolutePath = HashedArchives.GetOrDefault(a.Hash) })
|
|
|
|
|
.Where(a => a.AbsolutePath != null)
|
|
|
|
|
.ToList();
|
|
|
|
|
|
|
|
|
|
Info("Installing Archives");
|
2019-11-17 14:06:28 +00:00
|
|
|
|
archives.PMap(Queue,a => InstallArchive(a.Archive, a.AbsolutePath, grouped[a.Archive.Hash]));
|
2019-11-16 13:22:40 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
        /// <summary>
        /// Installs all files sourced from a single archive: stages the needed entries
        /// via VFS, moves/copies them to their destinations, then applies any binary
        /// patches. Statement order matters: staging must complete before copying, and
        /// onFinish() (unstaging) must run before patching reads the installed files.
        /// </summary>
        /// <param name="archive">The archive being installed from.</param>
        /// <param name="absolutePath">On-disk path of the archive (unused directly; staging goes through VFS).</param>
        /// <param name="grouping">All FromArchive directives sourced from this archive.</param>
        private void InstallArchive(Archive archive, string absolutePath, IGrouping<string, FromArchive> grouping)
        {
            Status($"Extracting {archive.Name}");

            // Resolve each directive's VFS virtual file and remember it on the directive.
            List<FromArchive> vFiles = grouping.Select(g =>
            {
                var file = VFS.Index.FileForArchiveHashPath(g.ArchiveHashPath);
                g.FromFile = file;
                return g;
            }).ToList();

            // Stage (extract) each distinct source file once; onFinish cleans up staging.
            var onFinish = VFS.Stage(vFiles.Select(f => f.FromFile).Distinct());

            Status($"Copying files for {archive.Name}");

            // Copies (or moves) a file, clearing read-only flags on both ends so the
            // delete/overwrite cannot fail on read-only game files.
            void CopyFile(string from, string to, bool useMove)
            {
                if (File.Exists(to))
                {
                    var fi = new FileInfo(to);
                    if (fi.IsReadOnly)
                        fi.IsReadOnly = false;
                    File.Delete(to);
                }

                if (File.Exists(from))
                {
                    var fi = new FileInfo(from);
                    if (fi.IsReadOnly)
                        fi.IsReadOnly = false;
                }

                if (useMove)
                    File.Move(from, to);
                else
                    File.Copy(from, to);
                // If we don't do this, the file will use the last-modified date of the file when it was compressed
                // into an archive, which isn't really what we want in the case of files installed archives
                File.SetLastWriteTime(to, DateTime.Now);
            }

            // For each distinct staged source: MOVE it to the first destination, then
            // COPY that installed file to any further destinations.
            vFiles.GroupBy(f => f.FromFile)
                  .DoIndexed((idx, group) =>
            {
                Utils.Status("Installing files", idx * 100 / vFiles.Count);
                var firstDest = Path.Combine(OutputFolder, group.First().To);
                CopyFile(group.Key.StagedPath, firstDest, true);

                foreach (var copy in group.Skip(1))
                {
                    var nextDest = Path.Combine(OutputFolder, copy.To);
                    CopyFile(firstDest, nextDest, false);
                }

            });

            Status("Unstaging files");
            onFinish();

            // Now patch all the files from this archive
            foreach (var toPatch in grouping.OfType<PatchedFromArchive>())
            using (var patchStream = new MemoryStream())
            {
                Status($"Patching {Path.GetFileName(toPatch.To)}");
                // Read in the patch data

                byte[] patchData = LoadBytesFromPath(toPatch.PatchID);

                var toFile = Path.Combine(OutputFolder, toPatch.To);
                var oldData = new MemoryStream(File.ReadAllBytes(toFile));

                // Remove the file we're about to patch
                File.Delete(toFile);

                // Patch it
                using (var outStream = File.OpenWrite(toFile))
                {
                    BSDiff.Apply(oldData, () => new MemoryStream(patchData), outStream);
                }

                // Verify the patched result against the expected hash from the modlist.
                Status($"Verifying Patch {Path.GetFileName(toPatch.To)}");
                var resultSha = toFile.FileHash();
                if (resultSha != toPatch.Hash)
                    throw new InvalidDataException($"Invalid Hash for {toPatch.To} after patching");
            }
        }
|
|
|
|
|
|
|
|
|
|
public void DownloadArchives()
|
|
|
|
|
{
|
|
|
|
|
var missing = ModList.Archives.Where(a => !HashedArchives.ContainsKey(a.Hash)).ToList();
|
|
|
|
|
Info($"Missing {missing.Count} archives");
|
|
|
|
|
|
|
|
|
|
Info("Getting Nexus API Key, if a browser appears, please accept");
|
|
|
|
|
|
|
|
|
|
var dispatchers = missing.Select(m => m.State.GetDownloader()).Distinct();
|
|
|
|
|
|
|
|
|
|
foreach (var dispatcher in dispatchers)
|
|
|
|
|
dispatcher.Prepare();
|
|
|
|
|
|
|
|
|
|
DownloadMissingArchives(missing);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private void DownloadMissingArchives(List<Archive> missing, bool download = true)
|
|
|
|
|
{
|
|
|
|
|
if (download)
|
|
|
|
|
{
|
|
|
|
|
foreach (var a in missing.Where(a => a.State.GetType() == typeof(ManualDownloader.State)))
|
|
|
|
|
{
|
|
|
|
|
var outputPath = Path.Combine(DownloadFolder, a.Name);
|
|
|
|
|
a.State.Download(a, outputPath);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
missing.Where(a => a.State.GetType() != typeof(ManualDownloader.State))
|
2019-11-17 14:06:28 +00:00
|
|
|
|
.PMap(Queue, archive =>
|
2019-11-16 13:22:40 +00:00
|
|
|
|
{
|
|
|
|
|
Info($"Downloading {archive.Name}");
|
|
|
|
|
var outputPath = Path.Combine(DownloadFolder, archive.Name);
|
|
|
|
|
|
|
|
|
|
if (download)
|
|
|
|
|
if (outputPath.FileExists())
|
|
|
|
|
File.Delete(outputPath);
|
|
|
|
|
|
|
|
|
|
return DownloadArchive(archive, download);
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public bool DownloadArchive(Archive archive, bool download)
|
|
|
|
|
{
|
|
|
|
|
try
|
|
|
|
|
{
|
2019-11-20 00:15:46 +00:00
|
|
|
|
var path = Path.Combine(DownloadFolder, archive.Name);
|
|
|
|
|
archive.State.Download(archive, path);
|
|
|
|
|
path.FileHashCached();
|
|
|
|
|
|
2019-11-16 13:22:40 +00:00
|
|
|
|
}
|
|
|
|
|
catch (Exception ex)
|
|
|
|
|
{
|
|
|
|
|
Utils.Log($"Download error for file {archive.Name}");
|
|
|
|
|
Utils.Log(ex.ToString());
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public void HashArchives()
|
|
|
|
|
{
|
|
|
|
|
HashedArchives = Directory.EnumerateFiles(DownloadFolder)
|
2019-11-20 00:15:46 +00:00
|
|
|
|
.Where(e => !e.EndsWith(Consts.HashFileExtension))
|
|
|
|
|
.PMap(Queue, e => (e.FileHashCached(), e))
|
2019-11-16 13:22:40 +00:00
|
|
|
|
.OrderByDescending(e => File.GetLastWriteTime(e.Item2))
|
|
|
|
|
.GroupBy(e => e.Item1)
|
|
|
|
|
.Select(e => e.First())
|
|
|
|
|
.ToDictionary(e => e.Item1, e => e.Item2);
|
|
|
|
|
}
|
|
|
|
|
|
2019-11-20 23:39:03 +00:00
|
|
|
|
public void ValidateFreeSpace()
|
|
|
|
|
{
|
|
|
|
|
DiskSpaceInfo DriveInfo(string path)
|
|
|
|
|
{
|
|
|
|
|
return Volume.GetDiskFreeSpace(Volume.GetUniqueVolumeNameForPath(path));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
var paths = new[] {(OutputFolder, ModList.InstallSize),
|
|
|
|
|
(DownloadFolder, ModList.DownloadSize),
|
|
|
|
|
(Directory.GetCurrentDirectory(), ModList.ScratchSpaceSize)};
|
|
|
|
|
paths.GroupBy(f => DriveInfo(f.Item1).DriveName)
|
|
|
|
|
.Do(g =>
|
|
|
|
|
{
|
|
|
|
|
var required = g.Sum(i => i.Item2);
|
2019-11-24 00:04:34 +00:00
|
|
|
|
var contains = g.Sum(folder =>
|
|
|
|
|
Directory.EnumerateFiles(folder.Item1, "*", DirectoryEnumerationOptions.Recursive)
|
|
|
|
|
.Sum(file => new FileInfo(file).Length));
|
2019-11-20 23:39:03 +00:00
|
|
|
|
var available = DriveInfo(g.Key).FreeBytesAvailable;
|
2019-11-24 00:04:34 +00:00
|
|
|
|
if (required - contains > available)
|
2019-11-20 23:39:03 +00:00
|
|
|
|
throw new NotEnoughDiskSpaceException(
|
|
|
|
|
$"This modlist requires {required.ToFileSizeString()} on {g.Key} but only {available.ToFileSizeString()} is available.");
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public int RecommendQueueSize()
|
|
|
|
|
{
|
|
|
|
|
var output_size = RecommendQueueSize(OutputFolder);
|
|
|
|
|
var download_size = RecommendQueueSize(DownloadFolder);
|
|
|
|
|
var scratch_size = RecommendQueueSize(Directory.GetCurrentDirectory());
|
|
|
|
|
var result = Math.Min(output_size, Math.Min(download_size, scratch_size));
|
|
|
|
|
Utils.Log($"Recommending a queue size of {result} based on disk performance and number of cores");
|
|
|
|
|
return result;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2019-11-18 05:21:24 +00:00
|
|
|
|
        /// <summary>
        /// The user may already have some files in the OutputFolder. If so we can go through these and
        /// figure out which need to be updated, deleted, or left alone
        /// </summary>
        public void OptimizeModlist()
        {
            Utils.Log("Optimizing Modlist directives");
            // Index every directive by its destination path for quick lookup.
            var indexed = ModList.Directives.ToDictionary(d => d.To);

            // Pass 1: delete any file in the output folder that the modlist does not
            // claim, except files living under the download folder.
            Directory.EnumerateFiles(OutputFolder, "*", DirectoryEnumerationOptions.Recursive)
                .PMap(Queue, f =>
                {
                    var relative_to = f.RelativeTo(OutputFolder);
                    Utils.Status($"Checking if modlist file {relative_to}");
                    if (indexed.ContainsKey(relative_to) || f.StartsWith(DownloadFolder + Path.DirectorySeparator))
                    {
                        return;
                    }

                    Utils.Log($"Deleting {relative_to} it's not part of this modlist");
                    File.Delete(f);
                });

            // Pass 2: drop directives whose destination already exists with the correct
            // size and hash — those files need no work.
            indexed.Values.PMap(Queue, d =>
            {
                // Bit backwards, but we want to return null for
                // all files we *want* installed. We return the files
                // to remove from the install list.
                Status($"Optimizing {d.To}");
                var path = Path.Combine(OutputFolder, d.To);
                if (!File.Exists(path)) return null;

                // Cheap size check first; only hash when the size matches.
                var fi = new FileInfo(path);
                if (fi.Length != d.Size) return null;

                return path.FileHash() == d.Hash ? d : null;
            }).Where(d => d != null)
            .Do(d => indexed.Remove(d.To));

            Utils.Log($"Optimized {ModList.Directives.Count} directives to {indexed.Count} required");
            // Keep only the archives still referenced by a surviving FromArchive directive.
            var requiredArchives = indexed.Values.OfType<FromArchive>()
                .GroupBy(d => d.ArchiveHashPath[0])
                .Select(d => d.Key)
                .ToHashSet();

            ModList.Archives = ModList.Archives.Where(a => requiredArchives.Contains(a.Hash)).ToList();
            ModList.Directives = indexed.Values.ToList();
        }
|
2019-11-16 13:22:40 +00:00
|
|
|
|
}
|
2019-11-20 23:39:03 +00:00
|
|
|
|
|
|
|
|
|
public class NotEnoughDiskSpaceException : Exception
|
|
|
|
|
{
|
|
|
|
|
public NotEnoughDiskSpaceException(string s) : base(s)
|
|
|
|
|
{
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-11-16 13:22:40 +00:00
|
|
|
|
}
|