using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Threading.Tasks;
using Alphaleonis.Win32.Filesystem;
using Wabbajack.Common;
using Wabbajack.Lib.Downloaders;
using Wabbajack.VirtualFileSystem;
using Directory = Alphaleonis.Win32.Filesystem.Directory;
using File = Alphaleonis.Win32.Filesystem.File;
using FileInfo = Alphaleonis.Win32.Filesystem.FileInfo;
using Path = Alphaleonis.Win32.Filesystem.Path;
namespace Wabbajack.Lib
{
2019-11-17 23:48:32 +00:00
public abstract class AInstaller : ABatchProcessor
2019-11-16 13:22:40 +00:00
{
public bool IgnoreMissingFiles { get ; internal set ; } = false ;
2019-12-01 20:22:33 +00:00
public string OutputFolder { get ; private set ; }
public string DownloadFolder { get ; private set ; }
2019-11-16 13:22:40 +00:00
2019-12-01 20:22:33 +00:00
public abstract ModManager ModManager { get ; }
2019-11-16 13:22:40 +00:00
2019-12-01 20:22:33 +00:00
public string ModListArchive { get ; private set ; }
public ModList ModList { get ; private set ; }
2019-11-16 13:22:40 +00:00
public Dictionary < string , string > HashedArchives { get ; set ; }
2019-12-01 20:22:33 +00:00
public AInstaller ( string archive , ModList modList , string outputFolder , string downloadFolder )
{
ModList = modList ;
ModListArchive = archive ;
OutputFolder = outputFolder ;
DownloadFolder = downloadFolder ;
}
2019-11-16 13:22:40 +00:00
public void Info ( string msg )
{
Utils . Log ( msg ) ;
}
public void Status ( string msg )
{
2019-11-17 14:06:28 +00:00
Queue . Report ( msg , 0 ) ;
2019-11-16 13:22:40 +00:00
}
public void Error ( string msg )
{
Utils . Log ( msg ) ;
throw new Exception ( msg ) ;
}
public byte [ ] LoadBytesFromPath ( string path )
{
using ( var fs = new FileStream ( ModListArchive , FileMode . Open , FileAccess . Read , FileShare . Read ) )
using ( var ar = new ZipArchive ( fs , ZipArchiveMode . Read ) )
using ( var ms = new MemoryStream ( ) )
{
var entry = ar . GetEntry ( path ) ;
using ( var e = entry . Open ( ) )
e . CopyTo ( ms ) ;
return ms . ToArray ( ) ;
}
}
public static ModList LoadFromFile ( string path )
{
using ( var fs = new FileStream ( path , FileMode . Open , FileAccess . Read , FileShare . Read ) )
using ( var ar = new ZipArchive ( fs , ZipArchiveMode . Read ) )
{
var entry = ar . GetEntry ( "modlist" ) ;
if ( entry = = null )
{
entry = ar . GetEntry ( "modlist.json" ) ;
using ( var e = entry . Open ( ) )
return e . FromJSON < ModList > ( ) ;
}
using ( var e = entry . Open ( ) )
2019-12-20 20:01:01 +00:00
return e . FromCERAS < ModList > ( CerasConfig . Config ) ;
2019-11-16 13:22:40 +00:00
}
}
/// <summary>
/// We don't want to make the installer index all the archives, that's just a waste of time, so instead
/// we'll pass just enough information to VFS to let it know about the files we have.
/// </summary>
2019-12-07 02:54:27 +00:00
public async Task PrimeVFS ( )
2019-11-16 13:22:40 +00:00
{
VFS . AddKnown ( HashedArchives . Select ( a = > new KnownFile
{
Paths = new [ ] { a . Value } ,
Hash = a . Key
} ) ) ;
VFS . AddKnown (
ModList . Directives
. OfType < FromArchive > ( )
. Select ( f = > new KnownFile { Paths = f . ArchiveHashPath } ) ) ;
2019-12-07 02:54:27 +00:00
await VFS . BackfillMissing ( ) ;
2019-11-16 13:22:40 +00:00
}
public void BuildFolderStructure ( )
{
Info ( "Building Folder Structure" ) ;
ModList . Directives
. Select ( d = > Path . Combine ( OutputFolder , Path . GetDirectoryName ( d . To ) ) )
. ToHashSet ( )
. Do ( f = >
{
if ( Directory . Exists ( f ) ) return ;
Directory . CreateDirectory ( f ) ;
} ) ;
}
2019-12-04 01:26:26 +00:00
public async Task InstallArchives ( )
2019-11-16 13:22:40 +00:00
{
Info ( "Installing Archives" ) ;
Info ( "Grouping Install Files" ) ;
var grouped = ModList . Directives
. OfType < FromArchive > ( )
. GroupBy ( e = > e . ArchiveHashPath [ 0 ] )
. ToDictionary ( k = > k . Key ) ;
var archives = ModList . Archives
. Select ( a = > new { Archive = a , AbsolutePath = HashedArchives . GetOrDefault ( a . Hash ) } )
. Where ( a = > a . AbsolutePath ! = null )
. ToList ( ) ;
Info ( "Installing Archives" ) ;
2019-12-23 23:28:47 +00:00
await archives . PMap ( Queue , UpdateTracker , a = > InstallArchive ( Queue , a . Archive , a . AbsolutePath , grouped [ a . Archive . Hash ] ) ) ;
2019-11-16 13:22:40 +00:00
}
2019-12-23 23:28:47 +00:00
private async Task InstallArchive ( WorkQueue queue , Archive archive , string absolutePath , IGrouping < string , FromArchive > grouping )
2019-11-16 13:22:40 +00:00
{
Status ( $"Extracting {archive.Name}" ) ;
List < FromArchive > vFiles = grouping . Select ( g = >
{
var file = VFS . Index . FileForArchiveHashPath ( g . ArchiveHashPath ) ;
g . FromFile = file ;
return g ;
} ) . ToList ( ) ;
2019-12-04 01:26:26 +00:00
var onFinish = await VFS . Stage ( vFiles . Select ( f = > f . FromFile ) . Distinct ( ) ) ;
2019-11-16 13:22:40 +00:00
Status ( $"Copying files for {archive.Name}" ) ;
void CopyFile ( string from , string to , bool useMove )
{
if ( File . Exists ( to ) )
{
var fi = new FileInfo ( to ) ;
if ( fi . IsReadOnly )
fi . IsReadOnly = false ;
File . Delete ( to ) ;
}
if ( File . Exists ( from ) )
{
var fi = new FileInfo ( from ) ;
if ( fi . IsReadOnly )
fi . IsReadOnly = false ;
}
if ( useMove )
File . Move ( from , to ) ;
else
File . Copy ( from , to ) ;
2019-11-18 05:21:24 +00:00
// If we don't do this, the file will use the last-modified date of the file when it was compressed
// into an archive, which isn't really what we want in the case of files installed archives
File . SetLastWriteTime ( to , DateTime . Now ) ;
2019-11-16 13:22:40 +00:00
}
2019-12-23 23:28:47 +00:00
await vFiles . GroupBy ( f = > f . FromFile )
. PDoIndexed ( queue , ( idx , group ) = >
2019-11-16 13:22:40 +00:00
{
Utils . Status ( "Installing files" , idx * 100 / vFiles . Count ) ;
var firstDest = Path . Combine ( OutputFolder , group . First ( ) . To ) ;
CopyFile ( group . Key . StagedPath , firstDest , true ) ;
foreach ( var copy in group . Skip ( 1 ) )
{
var nextDest = Path . Combine ( OutputFolder , copy . To ) ;
CopyFile ( firstDest , nextDest , false ) ;
}
} ) ;
Status ( "Unstaging files" ) ;
onFinish ( ) ;
// Now patch all the files from this archive
foreach ( var toPatch in grouping . OfType < PatchedFromArchive > ( ) )
using ( var patchStream = new MemoryStream ( ) )
{
Status ( $"Patching {Path.GetFileName(toPatch.To)}" ) ;
// Read in the patch data
byte [ ] patchData = LoadBytesFromPath ( toPatch . PatchID ) ;
var toFile = Path . Combine ( OutputFolder , toPatch . To ) ;
var oldData = new MemoryStream ( File . ReadAllBytes ( toFile ) ) ;
// Remove the file we're about to patch
File . Delete ( toFile ) ;
// Patch it
using ( var outStream = File . OpenWrite ( toFile ) )
{
BSDiff . Apply ( oldData , ( ) = > new MemoryStream ( patchData ) , outStream ) ;
}
Status ( $"Verifying Patch {Path.GetFileName(toPatch.To)}" ) ;
var resultSha = toFile . FileHash ( ) ;
if ( resultSha ! = toPatch . Hash )
throw new InvalidDataException ( $"Invalid Hash for {toPatch.To} after patching" ) ;
}
}
2019-12-04 01:26:26 +00:00
public async Task DownloadArchives ( )
2019-11-16 13:22:40 +00:00
{
var missing = ModList . Archives . Where ( a = > ! HashedArchives . ContainsKey ( a . Hash ) ) . ToList ( ) ;
Info ( $"Missing {missing.Count} archives" ) ;
Info ( "Getting Nexus API Key, if a browser appears, please accept" ) ;
var dispatchers = missing . Select ( m = > m . State . GetDownloader ( ) ) . Distinct ( ) ;
2019-12-07 02:45:13 +00:00
await Task . WhenAll ( dispatchers . Select ( d = > d . Prepare ( ) ) ) ;
2019-11-16 13:22:40 +00:00
2019-12-04 01:26:26 +00:00
await DownloadMissingArchives ( missing ) ;
2019-11-16 13:22:40 +00:00
}
2019-12-04 01:26:26 +00:00
private async Task DownloadMissingArchives ( List < Archive > missing , bool download = true )
2019-11-16 13:22:40 +00:00
{
if ( download )
{
foreach ( var a in missing . Where ( a = > a . State . GetType ( ) = = typeof ( ManualDownloader . State ) ) )
{
var outputPath = Path . Combine ( DownloadFolder , a . Name ) ;
2019-12-06 05:29:17 +00:00
await a . State . Download ( a , outputPath ) ;
2019-11-16 13:22:40 +00:00
}
}
2019-12-04 01:26:26 +00:00
await missing . Where ( a = > a . State . GetType ( ) ! = typeof ( ManualDownloader . State ) )
2019-12-06 05:29:17 +00:00
. PMap ( Queue , async archive = >
2019-11-16 13:22:40 +00:00
{
Info ( $"Downloading {archive.Name}" ) ;
var outputPath = Path . Combine ( DownloadFolder , archive . Name ) ;
if ( download )
if ( outputPath . FileExists ( ) )
File . Delete ( outputPath ) ;
2019-12-06 05:29:17 +00:00
return await DownloadArchive ( archive , download ) ;
2019-11-16 13:22:40 +00:00
} ) ;
}
2019-12-06 05:29:17 +00:00
public async Task < bool > DownloadArchive ( Archive archive , bool download )
2019-11-16 13:22:40 +00:00
{
try
{
2019-11-20 00:15:46 +00:00
var path = Path . Combine ( DownloadFolder , archive . Name ) ;
2019-12-06 05:29:17 +00:00
await archive . State . Download ( archive , path ) ;
2019-11-20 00:15:46 +00:00
path . FileHashCached ( ) ;
2019-11-16 13:22:40 +00:00
}
catch ( Exception ex )
{
Utils . Log ( $"Download error for file {archive.Name}" ) ;
Utils . Log ( ex . ToString ( ) ) ;
return false ;
}
return false ;
}
2019-12-04 01:26:26 +00:00
public async Task HashArchives ( )
2019-11-16 13:22:40 +00:00
{
2019-12-04 01:26:26 +00:00
var hashResults = await Directory . EnumerateFiles ( DownloadFolder )
2019-11-20 00:15:46 +00:00
. Where ( e = > ! e . EndsWith ( Consts . HashFileExtension ) )
2019-12-04 01:26:26 +00:00
. PMap ( Queue , e = > ( e . FileHashCached ( ) , e ) ) ;
HashedArchives = hashResults
2019-11-16 13:22:40 +00:00
. OrderByDescending ( e = > File . GetLastWriteTime ( e . Item2 ) )
. GroupBy ( e = > e . Item1 )
. Select ( e = > e . First ( ) )
. ToDictionary ( e = > e . Item1 , e = > e . Item2 ) ;
}
2019-11-24 21:36:26 +00:00
/// <summary>
/// Disabled
/// </summary>
2019-11-20 23:39:03 +00:00
public void ValidateFreeSpace ( )
{
2019-11-24 21:36:26 +00:00
return ;
// Disabled, caused more problems than it was worth.
/ *
DiskSpaceInfo DriveInfo ( string path )
2019-11-20 23:39:03 +00:00
{
return Volume . GetDiskFreeSpace ( Volume . GetUniqueVolumeNameForPath ( path ) ) ;
}
var paths = new [ ] { ( OutputFolder , ModList . InstallSize ) ,
( DownloadFolder , ModList . DownloadSize ) ,
( Directory . GetCurrentDirectory ( ) , ModList . ScratchSpaceSize ) } ;
paths . GroupBy ( f = > DriveInfo ( f . Item1 ) . DriveName )
. Do ( g = >
{
var required = g . Sum ( i = > i . Item2 ) ;
2019-11-24 00:04:34 +00:00
var contains = g . Sum ( folder = >
Directory . EnumerateFiles ( folder . Item1 , "*" , DirectoryEnumerationOptions . Recursive )
. Sum ( file = > new FileInfo ( file ) . Length ) ) ;
2019-11-20 23:39:03 +00:00
var available = DriveInfo ( g . Key ) . FreeBytesAvailable ;
2019-11-24 00:04:34 +00:00
if ( required - contains > available )
2019-11-20 23:39:03 +00:00
throw new NotEnoughDiskSpaceException (
$"This modlist requires {required.ToFileSizeString()} on {g.Key} but only {available.ToFileSizeString()} is available." ) ;
} ) ;
2019-11-24 21:36:26 +00:00
* /
2019-11-20 23:39:03 +00:00
}
2019-12-04 01:26:26 +00:00
public async Task < int > RecommendQueueSize ( )
2019-11-20 23:39:03 +00:00
{
2019-12-19 16:09:31 +00:00
const ulong GB = ( 1024 * 1024 * 1024 ) ;
// Most of the heavy lifting is done on the scratch disk, so we'll use the value from that disk
var memory = Utils . GetMemoryStatus ( ) ;
// Assume roughly 2GB of ram needed to extract each 7zip archive, and then leave 2GB for the OS
var based_on_memory = ( memory . ullTotalPhys - ( 2 * GB ) ) / ( 2 * GB ) ;
2019-12-04 01:26:26 +00:00
var scratch_size = await RecommendQueueSize ( Directory . GetCurrentDirectory ( ) ) ;
2019-12-19 16:09:31 +00:00
var result = Math . Min ( ( int ) based_on_memory , ( int ) scratch_size ) ;
Utils . Log ( $"Recommending a queue size of {result} based on disk performance, number of cores, and {((long)memory.ullTotalPhys).ToFileSizeString()} of system RAM" ) ;
2019-11-20 23:39:03 +00:00
return result ;
}
2019-11-18 05:21:24 +00:00
/// <summary>
/// The user may already have some files in the OutputFolder. If so we can go through these and
/// figure out which need to be updated, deleted, or left alone
/// </summary>
2019-12-04 01:26:26 +00:00
public async Task OptimizeModlist ( )
2019-11-18 05:21:24 +00:00
{
Utils . Log ( "Optimizing Modlist directives" ) ;
var indexed = ModList . Directives . ToDictionary ( d = > d . To ) ;
2019-11-24 23:03:36 +00:00
UpdateTracker . NextStep ( "Looking for files to delete" ) ;
2019-12-04 01:26:26 +00:00
await Directory . EnumerateFiles ( OutputFolder , "*" , DirectoryEnumerationOptions . Recursive )
2019-11-24 23:03:36 +00:00
. PMap ( Queue , UpdateTracker , f = >
2019-11-20 00:15:46 +00:00
{
var relative_to = f . RelativeTo ( OutputFolder ) ;
Utils . Status ( $"Checking if modlist file {relative_to}" ) ;
2019-12-12 22:52:24 +00:00
if ( indexed . ContainsKey ( relative_to ) | | f . IsInPath ( DownloadFolder ) )
2019-11-20 00:15:46 +00:00
return ;
Utils . Log ( $"Deleting {relative_to} it's not part of this modlist" ) ;
File . Delete ( f ) ;
} ) ;
2019-11-24 23:03:36 +00:00
UpdateTracker . NextStep ( "Looking for unmodified files" ) ;
2019-12-04 01:26:26 +00:00
( await indexed . Values . PMap ( Queue , UpdateTracker , d = >
2019-11-18 05:21:24 +00:00
{
// Bit backwards, but we want to return null for
// all files we *want* installed. We return the files
// to remove from the install list.
2019-11-20 00:15:46 +00:00
Status ( $"Optimizing {d.To}" ) ;
2019-11-18 05:21:24 +00:00
var path = Path . Combine ( OutputFolder , d . To ) ;
if ( ! File . Exists ( path ) ) return null ;
var fi = new FileInfo ( path ) ;
if ( fi . Length ! = d . Size ) return null ;
return path . FileHash ( ) = = d . Hash ? d : null ;
2019-12-04 01:26:26 +00:00
} ) )
. Where ( d = > d ! = null )
2019-11-18 05:21:24 +00:00
. Do ( d = > indexed . Remove ( d . To ) ) ;
2020-01-05 05:07:00 +00:00
Utils . Log ( "Cleaning empty folders" ) ;
var expectedFolders = indexed . Keys . SelectMany ( path = >
{
// Get all the folders and all the folder parents
// so for foo\bar\baz\qux.txt this emits ["foo", "foo\\bar", "foo\\bar\\baz"]
var split = path . Split ( '\\' ) ;
return Enumerable . Range ( 1 , split . Length - 1 ) . Select ( t = > string . Join ( "\\" , split . Take ( t ) ) ) ;
} ) . Distinct ( )
. Select ( p = > Path . Combine ( OutputFolder , p ) )
. ToHashSet ( ) ;
try
{
Directory . EnumerateDirectories ( OutputFolder , DirectoryEnumerationOptions . Recursive )
. Where ( p = > ! expectedFolders . Contains ( p ) )
. OrderByDescending ( p = > p . Length )
. Do ( p = > Directory . Delete ( p ) ) ;
}
catch ( Exception )
{
// ignored because it's not worth throwing a fit over
Utils . Log ( "Error when trying to clean empty folders. This doesn't really matter." ) ;
}
2019-11-24 23:03:36 +00:00
UpdateTracker . NextStep ( "Updating Modlist" ) ;
2019-11-18 05:21:24 +00:00
Utils . Log ( $"Optimized {ModList.Directives.Count} directives to {indexed.Count} required" ) ;
var requiredArchives = indexed . Values . OfType < FromArchive > ( )
. GroupBy ( d = > d . ArchiveHashPath [ 0 ] )
. Select ( d = > d . Key )
. ToHashSet ( ) ;
ModList . Archives = ModList . Archives . Where ( a = > requiredArchives . Contains ( a . Hash ) ) . ToList ( ) ;
ModList . Directives = indexed . Values . ToList ( ) ;
}
2019-11-16 13:22:40 +00:00
}
2019-11-20 23:39:03 +00:00
public class NotEnoughDiskSpaceException : Exception
{
public NotEnoughDiskSpaceException ( string s ) : base ( s )
{
}
}
2019-11-16 13:22:40 +00:00
}