mirror of https://github.com/wabbajack-tools/wabbajack.git (synced 2024-08-30 18:42:17 +00:00)

Common and BSA routines now use async IO exclusively

This commit is contained in:
    parent f9dc9148e7
    commit ea08c9865d
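The change repeated throughout this diff: the Wabbajack.Common path helpers (OpenRead, OpenShared, Create, MoveTo, WriteAllBytes, and friends) now return tasks, so every call site gains an await for the open, an `await using` for disposal, and the byte-level reads and writes move to their Async counterparts. A minimal sketch of the before/after shape, using plain FileStream as a stand-in for the Wabbajack stream helpers (illustrative only, not the library's API):

    using System.IO;
    using System.Threading.Tasks;

    public static class AsyncIoSketch
    {
        // Before: synchronous open, copy, and dispose.
        public static void CopySync(string src, string dest)
        {
            using var input = File.OpenRead(src);
            using var output = File.Create(dest);
            input.CopyTo(output);
        }

        // After: the open is awaited, disposal is asynchronous, and the copy is async.
        public static async Task CopyAsync(string src, string dest)
        {
            await using var input = new FileStream(src, FileMode.Open, FileAccess.Read,
                FileShare.Read, bufferSize: 4096, useAsync: true);
            await using var output = new FileStream(dest, FileMode.Create, FileAccess.Write,
                FileShare.None, bufferSize: 4096, useAsync: true);
            await input.CopyToAsync(output);
        }
    }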
@@ -58,7 +58,7 @@ namespace Compression.BSA
         public async Task Build(AbsolutePath filename)
         {
             SortEntries();
-            await using var fs = filename.Create();
+            await using var fs = await filename.Create();
             await using var bw = new BinaryWriter(fs);

             bw.Write(Encoding.ASCII.GetBytes(_state.HeaderMagic));
@@ -38,19 +38,22 @@ namespace Compression.BSA

         public bool HasNameTable => _nameTableOffset > 0;

-        public BA2Reader(AbsolutePath filename) : this(filename.OpenRead())
+        public static async Task<BA2Reader> Load(AbsolutePath filename)
         {
-            _filename = filename;
+            var rdr = new BA2Reader(await filename.OpenShared()) {_filename = filename};
+            await rdr.LoadHeaders();
+            return rdr;
         }

-        public BA2Reader(Stream stream)
+        private BA2Reader(Stream stream)
         {
             _stream = stream;
             _rdr = new BinaryReader(_stream, Encoding.UTF7);
-            LoadHeaders();
         }

-        public void LoadHeaders()
+        private async Task LoadHeaders()
         {
             _headerMagic = Encoding.ASCII.GetString(_rdr.ReadBytes(4));

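Constructors cannot be async, so each reader in this commit (BA2Reader here, BSAReader and TES3Reader below) moves to a private constructor plus a static Load factory that awaits the open and then the header parse. A hedged sketch of that shape with simplified, illustrative types (not the actual BA2Reader members):

    using System.IO;
    using System.Text;
    using System.Threading.Tasks;

    public sealed class ArchiveReader
    {
        private readonly BinaryReader _rdr;
        private string _headerMagic = "";

        private ArchiveReader(Stream stream)
        {
            _rdr = new BinaryReader(stream, Encoding.ASCII);
        }

        // The work that used to live in the constructor now happens in an awaitable factory.
        public static async Task<ArchiveReader> Load(string path)
        {
            var stream = new FileStream(path, FileMode.Open, FileAccess.Read,
                FileShare.Read, bufferSize: 4096, useAsync: true);
            var rdr = new ArchiveReader(stream);
            await rdr.LoadHeaders();
            return rdr;
        }

        private Task LoadHeaders()
        {
            // A real reader parses magic, version, and entry tables here; elided in this sketch.
            _headerMagic = Encoding.ASCII.GetString(_rdr.ReadBytes(4));
            return Task.CompletedTask;
        }
    }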
@@ -196,39 +199,36 @@ namespace Compression.BSA

         public uint HeaderSize => DDS.HeaderSizeForFormat((DXGI_FORMAT)_format);

-        public void CopyDataTo(Stream output)
+        public async ValueTask CopyDataTo(Stream output)
         {
             var bw = new BinaryWriter(output);

             WriteHeader(bw);

-            using (var fs = _bsa._filename.OpenRead())
-            using (var br = new BinaryReader(fs))
-            {
-                foreach (var chunk in _chunks)
-                {
-                    var full = new byte[chunk._fullSz];
-                    var isCompressed = chunk._packSz != 0;
-
-                    br.BaseStream.Seek((long)chunk._offset, SeekOrigin.Begin);
-
-                    if (!isCompressed)
-                    {
-                        br.BaseStream.Read(full, 0, full.Length);
-                    }
-                    else
-                    {
-                        byte[] compressed = new byte[chunk._packSz];
-                        br.BaseStream.Read(compressed, 0, compressed.Length);
-                        var inflater = new Inflater();
-                        inflater.SetInput(compressed);
-                        inflater.Inflate(full);
-                    }
-
-                    bw.BaseStream.Write(full, 0, full.Length);
-                }
-            }
+            await using var fs = await _bsa._filename.OpenRead();
+            using var br = new BinaryReader(fs);
+            foreach (var chunk in _chunks)
+            {
+                var full = new byte[chunk._fullSz];
+                var isCompressed = chunk._packSz != 0;
+
+                br.BaseStream.Seek((long)chunk._offset, SeekOrigin.Begin);
+
+                if (!isCompressed)
+                {
+                    await br.BaseStream.ReadAsync(full, 0, full.Length);
+                }
+                else
+                {
+                    byte[] compressed = new byte[chunk._packSz];
+                    await br.BaseStream.ReadAsync(compressed, 0, compressed.Length);
+                    var inflater = new Inflater();
+                    inflater.SetInput(compressed);
+                    inflater.Inflate(full);
+                }
+
+                await bw.BaseStream.WriteAsync(full, 0, full.Length);
+            }
         }

         public void Dump(Action<string> print)
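Only the file IO in the chunk loop becomes asynchronous; inflating a packed chunk still runs synchronously on the buffered bytes, as in the SharpZipLib Inflater calls above. A standalone sketch of that per-chunk shape with generic names (the chunk layout and helper are illustrative, not the BA2 format; requires the SharpZipLib package, and it loops on ReadAsync so short reads are handled):

    using System.IO;
    using System.Threading.Tasks;
    using ICSharpCode.SharpZipLib.Zip.Compression;  // NuGet: SharpZipLib

    public static class ChunkCopy
    {
        // Reads one chunk at the given offset and writes its uncompressed form to output.
        public static async Task CopyChunk(Stream src, Stream output,
            long offset, int packedSize, int fullSize)
        {
            var full = new byte[fullSize];
            src.Seek(offset, SeekOrigin.Begin);

            if (packedSize == 0)
            {
                // Unpacked chunk: read it straight into the output buffer.
                await ReadFully(src, full);
            }
            else
            {
                // Packed chunk: read the compressed bytes asynchronously, then inflate in memory.
                var compressed = new byte[packedSize];
                await ReadFully(src, compressed);
                var inflater = new Inflater();
                inflater.SetInput(compressed);
                inflater.Inflate(full);
            }

            await output.WriteAsync(full, 0, full.Length);
        }

        private static async Task ReadFully(Stream src, byte[] buffer)
        {
            var read = 0;
            while (read < buffer.Length)
            {
                var n = await src.ReadAsync(buffer, read, buffer.Length - read);
                if (n == 0) throw new EndOfStreamException();
                read += n;
            }
        }
    }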
@@ -480,31 +480,28 @@ namespace Compression.BSA
         public uint Size => _realSize;
         public FileStateObject State => new BA2FileEntryState(this);

-        public void CopyDataTo(Stream output)
+        public async ValueTask CopyDataTo(Stream output)
         {
-            using (var fs = _bsa._filename.OpenRead())
-            {
-                fs.Seek((long) _offset, SeekOrigin.Begin);
-                uint len = Compressed ? _size : _realSize;
-
-                var bytes = new byte[len];
-                fs.Read(bytes, 0, (int) len);
-
-                if (!Compressed)
-                {
-                    output.Write(bytes, 0, bytes.Length);
-                }
-                else
-                {
-                    var uncompressed = new byte[_realSize];
-                    var inflater = new Inflater();
-                    inflater.SetInput(bytes);
-                    inflater.Inflate(uncompressed);
-                    output.Write(uncompressed, 0, uncompressed.Length);
-                }
-            }
+            await using var fs = await _bsa._filename.OpenRead();
+            fs.Seek((long) _offset, SeekOrigin.Begin);
+            uint len = Compressed ? _size : _realSize;
+
+            var bytes = new byte[len];
+            fs.Read(bytes, 0, (int) len);
+
+            if (!Compressed)
+            {
+                await output.WriteAsync(bytes, 0, bytes.Length);
+            }
+            else
+            {
+                var uncompressed = new byte[_realSize];
+                var inflater = new Inflater();
+                inflater.SetInput(bytes);
+                inflater.Inflate(uncompressed);
+                await output.WriteAsync(uncompressed, 0, uncompressed.Length);
+            }
         }

         [JsonName("BA2FileEntryState")]
@@ -95,7 +95,7 @@ namespace Compression.BSA
         public async Task Build(AbsolutePath outputName)
         {
             RegenFolderRecords();
-            await using var fs = outputName.Create();
+            await using var fs = await outputName.Create();
             await using var wtr = new BinaryWriter(fs);

             wtr.Write(_fileId);
@@ -1,33 +1,34 @@
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
+using System.Threading.Tasks;
 using Wabbajack.Common;

 namespace Compression.BSA
 {
     public static class BSADispatch
     {
-        public static IBSAReader OpenRead(AbsolutePath filename)
+        public static async ValueTask<IBSAReader> OpenRead(AbsolutePath filename)
         {
             var fourcc = "";
-            using (var file = filename.OpenRead())
+            using (var file = await filename.OpenRead())
             {
                 fourcc = Encoding.ASCII.GetString(new BinaryReader(file).ReadBytes(4));
             }

             if (fourcc == TES3Reader.TES3_MAGIC)
-                return new TES3Reader(filename);
+                return await TES3Reader.Load(filename);
             if (fourcc == "BSA\0")
-                return new BSAReader(filename);
+                return await BSAReader.Load(filename);
             if (fourcc == "BTDX")
-                return new BA2Reader(filename);
+                return await BA2Reader.Load(filename);
             throw new InvalidDataException("Filename is not a .bsa or .ba2, magic " + fourcc);
         }

         private static HashSet<string> MagicStrings = new HashSet<string> {TES3Reader.TES3_MAGIC, "BSA\0", "BTDX"};
-        public static bool MightBeBSA(AbsolutePath filename)
+        public static async ValueTask<bool> MightBeBSA(AbsolutePath filename)
         {
-            using var file = filename.OpenRead();
+            using var file = await filename.OpenRead();
             var fourcc = Encoding.ASCII.GetString(new BinaryReader(file).ReadBytes(4));
             return MagicStrings.Contains(fourcc);
         }
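Dispatch stays a plain fourcc check; only the open and the reader construction are awaited. A self-contained sketch of sniffing a four-byte magic asynchronously, using standard .NET only (the dispatch table itself is shown verbatim in the hunk above):

    using System.IO;
    using System.Text;
    using System.Threading.Tasks;

    public static class MagicSniffer
    {
        // Returns the first four bytes of a file as an ASCII string, e.g. "BTDX" or "BSA\0".
        public static async Task<string> ReadFourCc(string path)
        {
            await using var fs = new FileStream(path, FileMode.Open, FileAccess.Read,
                FileShare.Read, bufferSize: 16, useAsync: true);
            var buf = new byte[4];
            var read = 0;
            while (read < buf.Length)
            {
                var n = await fs.ReadAsync(buf, read, buf.Length - read);
                if (n == 0) throw new InvalidDataException("File shorter than four bytes");
                read += n;
            }
            return Encoding.ASCII.GetString(buf);
        }
    }

A caller would then branch on the returned string exactly as OpenRead does above.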
@@ -60,8 +60,8 @@ namespace Compression.BSA
         internal uint _folderRecordOffset;
         private List<FolderRecord> _folders;
         internal string _magic;
-        private readonly BinaryReader _rdr;
-        private readonly Stream _stream;
+        private BinaryReader _rdr;
+        private Stream _stream;
         internal uint _totalFileNameLength;
         internal uint _totalFolderNameLength;
         internal uint _version;
@@ -80,16 +80,15 @@ namespace Compression.BSA
         }


-        public BSAReader(AbsolutePath filename)
+        public static async ValueTask<BSAReader> Load(AbsolutePath filename)
         {
-            _fileName = filename;
-            using var stream = filename.OpenRead();
+            using var stream = await filename.OpenRead();
             using var br = new BinaryReader(stream);
-            _rdr = br;
-            _stream = stream;
-            LoadHeaders();
-            _rdr = null;
-            _stream = null;
+            var bsa = new BSAReader {_rdr = br, _stream = stream, _fileName = filename};
+            await bsa.LoadHeaders();
+            bsa._rdr = null;
+            bsa._stream = null;
+            return bsa;
         }

         public IEnumerable<IFile> Files
@@ -132,7 +131,7 @@ namespace Compression.BSA
         {
         }

-        private void LoadHeaders()
+        private async ValueTask LoadHeaders()
         {
             var fourcc = Encoding.ASCII.GetString(_rdr.ReadBytes(4));

@@ -330,43 +329,34 @@ namespace Compression.BSA
             _name = rdr.ReadStringTerm(_bsa.HeaderType);
         }

-        public void CopyDataTo(Stream output)
+        public async ValueTask CopyDataTo(Stream output)
         {
-            using (var in_file = _bsa._fileName.OpenRead())
-            using (var rdr = new BinaryReader(in_file))
-            {
-                rdr.BaseStream.Position = _dataOffset;
-
-                if (_bsa.HeaderType == VersionType.SSE)
-                {
-                    if (Compressed)
-                    {
-                        using var r = LZ4Stream.Decode(rdr.BaseStream);
-                        r.CopyToLimit(output, (int) _originalSize);
-                    }
-                    else
-                    {
-                        rdr.BaseStream.CopyToLimit(output, (int) _onDiskSize);
-                    }
-                }
-                else
-                {
-                    if (Compressed)
-                    {
-                        using var z = new InflaterInputStream(rdr.BaseStream);
-                        z.CopyToLimit(output, (int) _originalSize);
-                    }
-                    else
-                        rdr.BaseStream.CopyToLimit(output, (int) _onDiskSize);
-                }
-            }
+            await using var in_file = await _bsa._fileName.OpenRead();
+            using var rdr = new BinaryReader(in_file);
+            rdr.BaseStream.Position = _dataOffset;
+
+            if (_bsa.HeaderType == VersionType.SSE)
+            {
+                if (Compressed)
+                {
+                    using var r = LZ4Stream.Decode(rdr.BaseStream);
+                    await r.CopyToLimitAsync(output, (int) _originalSize);
+                }
+                else
+                {
+                    await rdr.BaseStream.CopyToLimitAsync(output, (int) _onDiskSize);
+                }
+            }
+            else
+            {
+                if (Compressed)
+                {
+                    await using var z = new InflaterInputStream(rdr.BaseStream);
+                    await z.CopyToLimitAsync(output, (int) _originalSize);
+                }
+                else
+                    await rdr.BaseStream.CopyToLimitAsync(output, (int) _onDiskSize);
+            }
         }
-
-        public byte[] GetData()
-        {
-            var ms = new MemoryStream();
-            CopyDataTo(ms);
-            return ms.ToArray();
-        }

@@ -60,7 +60,7 @@ namespace Compression.BSA
         /// in order to maintain thread-safe access.
         /// </summary>
         /// <param name="output"></param>
-        void CopyDataTo(Stream output);
+        ValueTask CopyDataTo(Stream output);

         void Dump(Action<string> print);
     }
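Changing the interface member from void to ValueTask pushes the async contract out to every implementation and caller, while keeping the call allocation-free when an implementation completes synchronously. A hedged usage sketch against a minimal stand-in interface (IArchiveEntry is illustrative, not the Compression.BSA type):

    using System.IO;
    using System.Threading.Tasks;

    public interface IArchiveEntry
    {
        ValueTask CopyDataTo(Stream output);
    }

    public static class Extractor
    {
        // Extracts one entry to a file on disk, awaiting the interface's ValueTask.
        public static async Task ExtractTo(IArchiveEntry entry, string destination)
        {
            await using var output = new FileStream(destination, FileMode.Create, FileAccess.Write,
                FileShare.None, bufferSize: 4096, useAsync: true);
            await entry.CopyDataTo(output);
        }
    }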
@@ -26,7 +26,7 @@ namespace Compression.BSA
         public async Task Build(AbsolutePath filename)
         {
-            await using var fs = filename.Create();
+            await using var fs = await filename.Create();
             await using var bw = new BinaryWriter(fs);

             bw.Write(_state.VersionNumber);
@@ -18,48 +18,52 @@ namespace Compression.BSA
         internal long _dataOffset;
         internal AbsolutePath _filename;

-        public TES3Reader(AbsolutePath filename)
+        public static async ValueTask<TES3Reader> Load(AbsolutePath filename)
         {
-            _filename = filename;
-            using var fs = filename.OpenRead();
+            await using var fs = await filename.OpenRead();
             using var br = new BinaryReader(fs);
-            _versionNumber = br.ReadUInt32();
-            _hashTableOffset = br.ReadUInt32();
-            _fileCount = br.ReadUInt32();
+            var rdr = new TES3Reader
+            {
+                _filename = filename,
+                _versionNumber = br.ReadUInt32(),
+                _hashTableOffset = br.ReadUInt32(),
+                _fileCount = br.ReadUInt32()
+            };

-            _files = new TES3FileEntry[_fileCount];
-            for (int i = 0; i < _fileCount; i++)
+            rdr._files = new TES3FileEntry[rdr._fileCount];
+            for (int i = 0; i < rdr._fileCount; i++)
             {
                 var file = new TES3FileEntry {
                     Index = i,
-                    Archive = this,
+                    Archive = rdr,
                     Size = br.ReadUInt32(),
                     Offset = br.ReadUInt32()

                 };
-                _files[i] = file;
+                rdr._files[i] = file;
             }

-            for (int i = 0; i < _fileCount; i++)
+            for (int i = 0; i < rdr._fileCount; i++)
             {
-                _files[i].NameOffset = br.ReadUInt32();
+                rdr._files[i].NameOffset = br.ReadUInt32();
             }

             var origPos = br.BaseStream.Position;
-            for (int i = 0; i < _fileCount; i++)
+            for (int i = 0; i < rdr._fileCount; i++)
             {
-                br.BaseStream.Position = origPos + _files[i].NameOffset;
-                _files[i].Path = new RelativePath(br.ReadStringTerm(VersionType.TES3));
+                br.BaseStream.Position = origPos + rdr._files[i].NameOffset;
+                rdr._files[i].Path = new RelativePath(br.ReadStringTerm(VersionType.TES3));
             }

-            br.BaseStream.Position = _hashTableOffset + 12;
-            for (int i = 0; i < _fileCount; i++)
+            br.BaseStream.Position = rdr._hashTableOffset + 12;
+            for (int i = 0; i < rdr._fileCount; i++)
             {
-                _files[i].Hash1 = br.ReadUInt32();
-                _files[i].Hash2 = br.ReadUInt32();
+                rdr._files[i].Hash1 = br.ReadUInt32();
+                rdr._files[i].Hash2 = br.ReadUInt32();
             }

-            _dataOffset = br.BaseStream.Position;
+            rdr._dataOffset = br.BaseStream.Position;
+            return rdr;
         }

         public async ValueTask DisposeAsync()
@@ -118,11 +122,11 @@ namespace Compression.BSA
                 Hash2 = Hash2
             };

-        public void CopyDataTo(Stream output)
+        public async ValueTask CopyDataTo(Stream output)
         {
-            using var fs = Archive._filename.OpenRead();
+            await using var fs = await Archive._filename.OpenRead();
             fs.Position = Archive._dataOffset + Offset;
-            fs.CopyToLimit(output, (int)Size);
+            await fs.CopyToLimitAsync(output, (int)Size);
         }

         public void Dump(Action<string> print)
@@ -16,7 +16,7 @@ namespace Wabbajack.CLI.Verbs

         protected override async Task<ExitCode> Run()
         {
-            File.WriteAllBytes(Output, await Utils.FromEncryptedData(Name));
+            await Output.RelativeTo(AbsolutePath.EntryPoint).WriteAllBytesAsync(await Utils.FromEncryptedData(Name));
             return 0;
         }
     }
@@ -11,20 +11,20 @@ namespace Wabbajack.Common.Test
         [Fact]
         public async Task CanDetectNewEncryptedData()
         {
-            var test_string = Guid.NewGuid().ToString();
+            var testString = Guid.NewGuid().ToString();
             var data = new ConcurrentBag<string>();
-            var events = Utils.HaveEncryptedJsonObservable(test_string).Subscribe(e =>
+            var events = Utils.HaveEncryptedJsonObservable(testString).Subscribe(e =>
             {
                 if (e)
-                    data.Add(test_string);
+                    data.Add(testString);
                 else
                     data.Clear();
             });

-            test_string.ToEcryptedJson(test_string);
+            await testString.ToEcryptedJson(testString);
             await Task.Delay(100);

-            Assert.Contains(test_string, data);
+            Assert.Contains(testString, data);


         }
@@ -1,28 +1,29 @@
-using Xunit;
+using System.Threading.Tasks;
+using Xunit;

 namespace Wabbajack.Common.Test
 {
     public class PathTests
     {
         [Fact]
-        public void CanDeleteReadOnlyFile()
+        public async Task CanDeleteReadOnlyFile()
         {
             var tempFile = new TempFile();
-            tempFile.Path.WriteAllText("Test");
+            await tempFile.Path.WriteAllTextAsync("Test");
             tempFile.Path.SetReadOnly(true);

             tempFile.Path.Delete();
         }

         [Fact]
-        public void CanMoveReadOnlyFiles()
+        public async Task CanMoveReadOnlyFiles()
         {
             var tempFile = new TempFile();
             var tempFile2 = new TempFile();
-            tempFile.Path.WriteAllText("Test");
+            await tempFile.Path.WriteAllTextAsync("Test");
             tempFile.Path.SetReadOnly(true);

-            tempFile.Path.MoveTo(tempFile2.Path);
+            await tempFile.Path.MoveToAsync(tempFile2.Path);
         }

         [Fact]
@@ -129,21 +129,6 @@ namespace Wabbajack.Common
             return sha.Hash.ToHex();
         }

-        public static Hash FileHash(this AbsolutePath file, bool nullOnIoError = false)
-        {
-            try
-            {
-                using var fs = file.OpenRead();
-                var config = new xxHashConfig {HashSizeInBits = 64};
-                using var f = new StatusFileStream(fs, $"Hashing {(string)file.FileName}");
-                return new Hash(BitConverter.ToUInt64(xxHashFactory.Instance.Create(config).ComputeHash(f).Hash));
-            }
-            catch (IOException)
-            {
-                if (nullOnIoError) return Hash.Empty;
-                throw;
-            }
-        }
         public static Hash xxHash(this byte[] data)
         {
             var hash = new xxHashConfig();
@@ -166,16 +151,6 @@ namespace Wabbajack.Common
             var value = xxHashFactory.Instance.Create(config).ComputeHash(f);
             return Hash.FromULong(BitConverter.ToUInt64(value.Hash));
         }
-        public static Hash FileHashCached(this AbsolutePath file, bool nullOnIoError = false)
-        {
-            if (TryGetHashCache(file, out var foundHash)) return foundHash;
-
-            var hash = file.FileHash(nullOnIoError);
-            if (hash != Hash.Empty)
-                WriteHashCache(file, hash);
-            return hash;
-        }

         public static bool TryGetHashCache(AbsolutePath file, out Hash hash)
         {
             var normPath = Encoding.UTF8.GetBytes(file.Normalize());
@@ -223,7 +198,7 @@ namespace Wabbajack.Common
         {
             try
             {
-                await using var fs = file.OpenRead();
+                await using var fs = await file.OpenRead();
                 var config = new xxHashConfig {HashSizeInBits = 64};
                 await using var hs = new StatusFileStream(fs, $"Hashing {file}");
                 var value = await xxHashFactory.Instance.Create(config).ComputeHashAsync(hs);
@@ -5,6 +5,7 @@ using System.IO;
 using System.Linq;
 using System.Reflection;
 using System.Text;
+using System.Threading.Tasks;
 using Newtonsoft.Json;
 using Newtonsoft.Json.Serialization;
 using Wabbajack.Common.Serialization.Json;
@@ -57,9 +58,9 @@ namespace Wabbajack.Common
             ser.Serialize(writer, obj);
         }

-        public static void ToJson<T>(this T obj, AbsolutePath path)
+        public static async ValueTask ToJsonAsync<T>(this T obj, AbsolutePath path)
         {
-            using var fs = path.Create();
+            await using var fs = await path.Create();
             obj.ToJson(fs);
         }

@@ -17,7 +17,7 @@ namespace Wabbajack.Common
         {
             if (!Utils.HaveEncryptedJson(Consts.MetricsKeyHeader))
             {
-                Utils.ToEcryptedJson(Utils.MakeRandomKey(), Consts.MetricsKeyHeader);
+                Utils.MakeRandomKey().ToEcryptedJson(Consts.MetricsKeyHeader).AsTask().Wait();
             }
         }
         /// <summary>
@@ -68,7 +68,7 @@ namespace Wabbajack.Common
             if (LogFile.Exists)
             {
                 var newPath = Consts.LogsFolder.Combine(Consts.EntryPoint.FileNameWithoutExtension + LogFile.LastModified.ToString(" yyyy-MM-dd HH_mm_ss") + ".log");
-                LogFile.MoveTo(newPath, true);
+                LogFile.MoveToAsync(newPath, true).Wait();
             }

             var logFiles = LogFolder.EnumerateFiles(false).ToList();
@@ -726,7 +726,7 @@ namespace Wabbajack.Common
             RETRY_OPEN:
             try
             {
-                await using var f = cacheFile.OpenRead();
+                await using var f = await cacheFile.OpenRead();
                 await f.CopyToAsync(output);
             }
             catch (IOException)
@@ -741,7 +741,7 @@ namespace Wabbajack.Common
         {
             var tmpName = Consts.PatchCacheFolder.Combine(Guid.NewGuid() + ".tmp");

-            await using (var f = tmpName.Create())
+            await using (var f = await tmpName.Create())
             {
                 Status("Creating Patch");
                 OctoDiff.Create(a, b, f);
@@ -750,7 +750,7 @@ namespace Wabbajack.Common
             RETRY:
             try
             {
-                tmpName.MoveTo(cacheFile, true);
+                await tmpName.MoveToAsync(cacheFile, true);
             }
             catch (UnauthorizedAccessException)
             {
@@ -775,7 +775,7 @@ namespace Wabbajack.Common
             patchStream.Position = 0;
             var tmpName = Consts.PatchCacheFolder.Combine(Guid.NewGuid() + ".tmp");

-            await using (var f = tmpName.Create())
+            await using (var f = await tmpName.Create())
             {
                 await patchStream.CopyToAsync(f);
                 patchStream.Position = 0;
@@ -786,7 +786,7 @@ namespace Wabbajack.Common
             var cacheFile = Consts.PatchCacheFolder.Combine($"{srcHash.ToHex()}_{destHash.ToHex()}.patch");
             Consts.PatchCacheFolder.CreateDirectory();

-            tmpName.MoveTo(cacheFile, true);
+            await tmpName.MoveToAsync(cacheFile, true);
         }
         catch (UnauthorizedAccessException)
         {
@@ -900,7 +900,7 @@ namespace Wabbajack.Common
             var startTime = DateTime.Now;
             var seconds = 2;
             var results = await Enumerable.Range(0, queue.DesiredNumWorkers)
-                .PMap(queue, idx =>
+                .PMap(queue, async idx =>
                 {
                     var random = new Random();

@@ -908,7 +908,7 @@ namespace Wabbajack.Common
                     long size = 0;
                     byte[] buffer = new byte[1024 * 8];
                     random.NextBytes(buffer);
-                    using (var fs = file.Create())
+                    await using (var fs = await file.Create())
                     {
                         while (DateTime.Now < startTime + new TimeSpan(0, 0, seconds))
                         {
@@ -939,7 +939,7 @@ namespace Wabbajack.Common
                 }
             }
             var speed = await TestDiskSpeedInner(queue, path);
-            speed.ToJson(benchmarkFile);
+            await speed.ToJsonAsync(benchmarkFile);

             return speed;
         }
@@ -1033,10 +1033,10 @@ namespace Wabbajack.Common
         /// <typeparam name="T"></typeparam>
         /// <param name="key"></param>
         /// <param name="data"></param>
-        public static void ToEcryptedJson<T>(this T data, string key)
+        public static async ValueTask ToEcryptedJson<T>(this T data, string key)
         {
             var bytes = Encoding.UTF8.GetBytes(data.ToJson());
-            bytes.ToEcryptedData(key);
+            await bytes.ToEcryptedData(key);
         }

         public static async Task<T> FromEncryptedJson<T>(string key)
@@ -1046,12 +1046,12 @@ namespace Wabbajack.Common
         }


-        public static void ToEcryptedData(this byte[] bytes, string key)
+        public static async ValueTask ToEcryptedData(this byte[] bytes, string key)
         {
             var encoded = ProtectedData.Protect(bytes, Encoding.UTF8.GetBytes(key), DataProtectionScope.LocalMachine);
             Consts.LocalAppDataPath.CreateDirectory();

-            Consts.LocalAppDataPath.Combine(key).WriteAllBytes(encoded);
+            await Consts.LocalAppDataPath.Combine(key).WriteAllBytesAsync(encoded);
         }
         public static async Task<byte[]> FromEncryptedData(string key)
         {
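The encrypted-settings helpers keep DPAPI for the protection step (ProtectedData.Protect is synchronous); only the file write becomes asynchronous. A standalone sketch of that split with an illustrative storage directory rather than Wabbajack's Consts.LocalAppDataPath (Windows-only; on .NET Core it needs the System.Security.Cryptography.ProtectedData package):

    using System.IO;
    using System.Security.Cryptography;
    using System.Text;
    using System.Threading.Tasks;

    public static class EncryptedStore
    {
        // Protects a UTF-8 payload with DPAPI, then writes it with async file IO.
        public static async Task WriteProtected(string directory, string key, string json)
        {
            var bytes = Encoding.UTF8.GetBytes(json);
            var encoded = ProtectedData.Protect(
                bytes, Encoding.UTF8.GetBytes(key), DataProtectionScope.LocalMachine);
            Directory.CreateDirectory(directory);
            await File.WriteAllBytesAsync(Path.Combine(directory, key), encoded);
        }
    }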
@@ -183,7 +183,7 @@ namespace Wabbajack.Test
             return name;
         }

-        public void VerifyInstalledFile(string mod, string file)
+        public async Task VerifyInstalledFile(string mod, string file)
         {
             var src = MO2Folder.Combine((string)Consts.MO2ModFolderName, mod, file);
             Assert.True(src.Exists);
@@ -191,8 +191,8 @@ namespace Wabbajack.Test
             var dest = InstallFolder.Combine((string)Consts.MO2ModFolderName, mod, file);
             Assert.True(dest.Exists, $"Destination {dest} doesn't exist");

-            var srcData = src.ReadAllBytes();
-            var destData = dest.ReadAllBytes();
+            var srcData = await src.ReadAllBytesAsync();
+            var destData = await dest.ReadAllBytesAsync();

             Assert.Equal(srcData.Length, destData.Length);

@@ -203,7 +203,7 @@ namespace Wabbajack.Test
             }
         }

-        public void VerifyInstalledGameFile(string file)
+        public async Task VerifyInstalledGameFile(string file)
         {
             var src = GameFolder.Combine(file);
             Assert.True(src.Exists);
@@ -211,8 +211,8 @@ namespace Wabbajack.Test
             var dest = InstallFolder.Combine((string)Consts.GameFolderFilesDir, file);
             Assert.True(dest.Exists);

-            var srcData = src.ReadAllBytes();
-            var destData = dest.ReadAllBytes();
+            var srcData = await src.ReadAllBytesAsync();
+            var destData = await dest.ReadAllBytesAsync();

             Assert.Equal(srcData.Length, destData.Length);
