Merge pull request #78 from wabbajack-tools/bs2-support

BA2 support
This commit is contained in:
Timothy Baldridge 2019-10-11 21:10:00 -06:00 committed by GitHub
commit 1282614825
20 changed files with 1299 additions and 96 deletions

View File

@ -7,6 +7,8 @@
* Remove Nexus timeout for login; it's pointless.
* Force slides to load before displaying
* Suppress slide load failures
* Set up crash handling at the very start of the app
* Add BA2 support
#### Version 0.9.4 - 10/2/2019
* Point github icon to https://github.com/wabbajack-tools/wabbajack

View File

@ -56,6 +56,9 @@
<Reference Include="ICSharpCode.SharpZipLib, Version=1.2.0.246, Culture=neutral, PublicKeyToken=1b03e6acf1164f73, processorArchitecture=MSIL">
<HintPath>..\packages\SharpZipLib.1.2.0\lib\net45\ICSharpCode.SharpZipLib.dll</HintPath>
</Reference>
<Reference Include="Newtonsoft.Json, Version=12.0.0.0, Culture=neutral, PublicKeyToken=30ad4fe6b2a6aeed, processorArchitecture=MSIL">
<HintPath>..\packages\Newtonsoft.Json.12.0.2\lib\net45\Newtonsoft.Json.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Xml.Linq" />

View File

@ -1,85 +1,101 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Newtonsoft.Json;
namespace Compression.BSA.Test
{
internal class Program
{
private const string TestDir = @"D:\MO2 Instances\";
private const string TempDir = "c:\\tmp\\out";
//private const string TestDirBSA = @"D:\MO2 Instances\F4EE";
//private const string TestDirBA2 = @"D:\MO2 Instances\F4EE";
private const string TestDir = @"D:\MO2 Instances";
//private const string TestDir = @"D:\Steam\steamapps\common\Fallout 4";
private const string TempDir = @"c:\tmp\out\f4ee";
private const string ArchiveTempDir = @"c:\tmp\out\archive";
//private const string Archive2Location = @"D:\Steam\steamapps\common\Fallout 4\Tools\Archive2\Archive2.exe";
private static void Main(string[] args)
{
foreach (var bsa in Directory.EnumerateFiles(TestDir, "*.bsa", SearchOption.AllDirectories).Skip(0))
foreach (var bsa in Directory.EnumerateFiles(TestDir, "*.ba2", SearchOption.AllDirectories)
//.Concat(Directory.EnumerateFiles(TestDir, "*.bsa", SearchOption.AllDirectories))
)
{
Console.WriteLine($"From {bsa}");
Console.WriteLine("Cleaning Output Dir");
if (Directory.Exists(TempDir)) Directory.Delete(TempDir, true);
if (Directory.Exists(ArchiveTempDir)) Directory.Delete(ArchiveTempDir, true);
Directory.CreateDirectory(TempDir);
Console.WriteLine($"Reading {bsa}");
using (var a = new BSAReader(bsa))
using (var a = BSADispatch.OpenRead(bsa))
{
Parallel.ForEach(a.Files, file =>
{
var abs_name = Path.Combine(TempDir, file.Path);
ViaJson(file.State);
if (!Directory.Exists(Path.GetDirectoryName(abs_name)))
Directory.CreateDirectory(Path.GetDirectoryName(abs_name));
using (var fs = File.OpenWrite(abs_name))
{
file.CopyDataTo(fs);
}
Equal(file.Size, new FileInfo(abs_name).Length);
});
/*
Console.WriteLine("Extracting via Archive.exe");
if (bsa.ToLower().EndsWith(".ba2"))
{
var p = Process.Start(Archive2Location, $"\"{bsa}\" -e=\"{ArchiveTempDir}\"");
p.WaitForExit();
foreach (var file in a.Files)
{
var a_path = Path.Combine(TempDir, file.Path);
var b_path = Path.Combine(ArchiveTempDir, file.Path);
Equal(new FileInfo(a_path).Length, new FileInfo(b_path).Length);
Equal(File.ReadAllBytes(a_path), File.ReadAllBytes(b_path));
}
}*/
Console.WriteLine($"Building {bsa}");
using (var w = new BSABuilder())
using (var w = ViaJson(a.State).MakeBuilder())
{
w.ArchiveFlags = a.ArchiveFlags;
w.FileFlags = a.FileFlags;
w.HeaderType = a.HeaderType;
Parallel.ForEach(a.Files, file =>
{
var abs_path = Path.Combine("c:\\tmp\\out", file.Path);
var abs_path = Path.Combine(TempDir, file.Path);
using (var str = File.OpenRead(abs_path))
{
var entry = w.AddFile(file.Path, str, file.FlipCompression);
w.AddFile(ViaJson(file.State), str);
}
});
w.Build("c:\\tmp\\tmp.bsa");
// Sanity Checks
Equal(a.Files.Count(), w.Files.Count());
Equal(a.Files.Select(f => f.Path).ToHashSet(), w.Files.Select(f => f.Path).ToHashSet());
/*foreach (var pair in Enumerable.Zip(a.Files, w.Files, (ai, bi) => (ai, bi)))
{
Console.WriteLine($"{pair.ai.Path}, {pair.ai.Hash}, {pair.bi.Path}, {pair.bi.Hash}");
}*/
foreach (var pair in a.Files.Zip(w.Files, (ai, bi) => (ai, bi)))
{
Equal(pair.ai.Path, pair.bi.Path);
Equal(pair.ai.Hash, pair.bi.Hash);
}
}
Console.WriteLine($"Verifying {bsa}");
using (var b = new BSAReader("c:\\tmp\\tmp.bsa"))
using (var b = BSADispatch.OpenRead("c:\\tmp\\tmp.bsa"))
{
Console.WriteLine($"Performing A/B tests on {bsa}");
Equal((uint) a.ArchiveFlags, (uint) b.ArchiveFlags);
Equal((uint) a.FileFlags, (uint) b.FileFlags);
Equal(JsonConvert.SerializeObject(a.State), JsonConvert.SerializeObject(b.State));
//Equal((uint) a.ArchiveFlags, (uint) b.ArchiveFlags);
//Equal((uint) a.FileFlags, (uint) b.FileFlags);
// Check same number of files
Equal(a.Files.Count(), b.Files.Count());
@ -87,17 +103,37 @@ namespace Compression.BSA.Test
foreach (var pair in a.Files.Zip(b.Files, (ai, bi) => (ai, bi)))
{
idx++;
Equal(JsonConvert.SerializeObject(pair.ai.State),
JsonConvert.SerializeObject(pair.bi.State));
//Console.WriteLine($" - {pair.ai.Path}");
Equal(pair.ai.Path, pair.bi.Path);
Equal(pair.ai.Compressed, pair.bi.Compressed);
//Equal(pair.ai.Compressed, pair.bi.Compressed);
Equal(pair.ai.Size, pair.bi.Size);
Equal(pair.ai.GetData(), pair.bi.GetData());
Equal(GetData(pair.ai), GetData(pair.bi));
}
}
}
}
}
private static byte[] GetData(IFile pairAi)
{
using (var ms = new MemoryStream())
{
pairAi.CopyDataTo(ms);
return ms.ToArray();
}
}
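// Round-trips a value through JSON (with full type info) so the tests prove the state
// objects capture everything needed to rebuild an archive.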
public static T ViaJson<T>(T i)
{
var settings = new JsonSerializerSettings
{
TypeNameHandling = TypeNameHandling.All
};
return JsonConvert.DeserializeObject<T>(JsonConvert.SerializeObject(i, settings), settings);
}
private static void Equal(HashSet<string> a, HashSet<string> b)
{
Equal(a.Count, b.Count);
@ -158,8 +194,12 @@ namespace Compression.BSA.Test
if (a.Length != b.Length) throw new InvalidDataException("Byte array sizes are not equal");
for (var idx = 0; idx < a.Length; idx++)
{
if (a[idx] != b[idx])
throw new InvalidDataException($"Byte array contents not equal at {idx}");
{
Console.WriteLine($"Byte array contents not equal at {idx} - {a[idx]} vs {b[idx]}");
}
}
}
}
}

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Newtonsoft.Json" version="12.0.2" targetFramework="net472" />
<package id="SharpZipLib" version="1.2.0" targetFramework="net472" />
</packages>

View File

@ -0,0 +1,259 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
namespace Compression.BSA
{
interface IFileBuilder
{
uint FileHash { get; }
uint DirHash { get; }
string FullName { get; }
int Index { get; }
void WriteData(BinaryWriter wtr);
void WriteHeader(BinaryWriter wtr);
}
public class BA2Builder : IBSABuilder
{
private BA2StateObject _state;
private List<IFileBuilder> _entries = new List<IFileBuilder>();
public BA2Builder(BA2StateObject state)
{
_state = state;
}
public void Dispose()
{
}
public void AddFile(FileStateObject state, Stream src)
{
switch (_state.Type)
{
case EntryType.GNRL:
var result = new BA2FileEntryBuilder((BA2FileEntryState)state, src);
lock(_entries) _entries.Add(result);
break;
case EntryType.DX10:
var resultdx10 = new BA2DX10FileEntryBuilder((BA2DX10EntryState)state, src);
lock(_entries) _entries.Add(resultdx10);
break;
}
}
public void Build(string filename)
{
SortEntries();
using (var fs = File.OpenWrite(filename))
using (var bw = new BinaryWriter(fs))
{
bw.Write(Encoding.ASCII.GetBytes(_state.HeaderMagic));
bw.Write(_state.Version);
bw.Write(Encoding.ASCII.GetBytes(Enum.GetName(typeof(EntryType), _state.Type)));
bw.Write((uint)_entries.Count);
var table_offset_loc = bw.BaseStream.Position;
bw.Write((ulong)0);
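// The name-table offset is not known yet, so an 8-byte placeholder is written here and
// backpatched after all file headers and data have been emitted (see the HasNameTable block below).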
foreach (var entry in _entries)
{
entry.WriteHeader(bw);
}
foreach (var entry in _entries)
{
entry.WriteData(bw);
}
if (_state.HasNameTable)
{
var pos = bw.BaseStream.Position;
bw.BaseStream.Seek(table_offset_loc, SeekOrigin.Begin);
bw.Write((ulong) pos);
bw.BaseStream.Seek(pos, SeekOrigin.Begin);
foreach (var entry in _entries)
{
var bytes = Encoding.UTF7.GetBytes(entry.FullName);
bw.Write((ushort)bytes.Length);
bw.Write(bytes);
}
}
}
}
private void SortEntries()
{
_entries = _entries.OrderBy(e => e.Index).ToList();
}
}
public class BA2DX10FileEntryBuilder : IFileBuilder
{
private BA2DX10EntryState _state;
private List<ChunkBuilder> _chunks;
public BA2DX10FileEntryBuilder(BA2DX10EntryState state, Stream src)
{
_state = state;
var header_size = DDS.HeaderSizeForFormat((DXGI_FORMAT) state.PixelFormat) + 4;
new BinaryReader(src).ReadBytes((int)header_size);
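// The incoming stream is expected to be a full DDS file (as produced by BA2DX10Entry.CopyDataTo),
// so the DDS header plus the 4-byte magic is skipped; BA2 archives store only raw chunk data.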
_chunks = _state.Chunks.Select(ch => new ChunkBuilder(state, ch, src)).ToList();
}
public uint FileHash => _state.NameHash;
public uint DirHash => _state.DirHash;
public string FullName => _state.Path;
public int Index => _state.Index;
public void WriteHeader(BinaryWriter bw)
{
bw.Write(_state.NameHash);
bw.Write(Encoding.ASCII.GetBytes(_state.Extension));
bw.Write(_state.DirHash);
bw.Write(_state.Unk8);
bw.Write((byte)_chunks.Count);
bw.Write(_state.ChunkHdrLen);
bw.Write(_state.Height);
bw.Write(_state.Width);
bw.Write(_state.NumMips);
bw.Write(_state.PixelFormat);
bw.Write(_state.Unk16);
foreach (var chunk in _chunks)
chunk.WriteHeader(bw);
}
public void WriteData(BinaryWriter wtr)
{
foreach (var chunk in _chunks)
chunk.WriteData(wtr);
}
}
public class ChunkBuilder
{
private ChunkState _chunk;
private byte[] _data;
private uint _packSize;
private long _offsetOffset;
public ChunkBuilder(BA2DX10EntryState state, ChunkState ch, Stream src)
{
_chunk = ch;
using (var ms = new MemoryStream())
{
src.CopyToLimit(ms, (int)_chunk.FullSz);
_data = ms.ToArray();
}
if (_chunk.Compressed)
{
using (var ms = new MemoryStream())
{
using (var ds = new DeflaterOutputStream(ms))
{
ds.Write(_data, 0, _data.Length);
}
_data = ms.ToArray();
}
_packSize = (uint)_data.Length;
}
}
public void WriteHeader(BinaryWriter bw)
{
_offsetOffset = bw.BaseStream.Position;
bw.Write((ulong)0);
bw.Write(_packSize);
bw.Write(_chunk.FullSz);
bw.Write(_chunk.StartMip);
bw.Write(_chunk.EndMip);
bw.Write(_chunk.Align);
}
public void WriteData(BinaryWriter bw)
{
var pos = bw.BaseStream.Position;
bw.BaseStream.Position = _offsetOffset;
bw.Write((ulong)pos);
bw.BaseStream.Position = pos;
bw.Write(_data);
}
}
public class BA2FileEntryBuilder : IFileBuilder
{
private byte[] _data;
private int _rawSize;
private int _size;
private BA2FileEntryState _state;
private long _offsetOffset;
public BA2FileEntryBuilder(BA2FileEntryState state, Stream src)
{
_state = state;
using (var ms = new MemoryStream())
{
src.CopyTo(ms);
_data = ms.ToArray();
}
_rawSize = _data.Length;
if (state.Compressed)
{
using (var ms = new MemoryStream())
{
using (var ds = new DeflaterOutputStream(ms))
{
ds.Write(_data, 0, _data.Length);
}
_data = ms.ToArray();
}
_size = _data.Length;
}
}
public uint FileHash => _state.NameHash;
public uint DirHash => _state.DirHash;
public string FullName => _state.Path;
public int Index => _state.Index;
public void WriteHeader(BinaryWriter wtr)
{
wtr.Write(_state.NameHash);
wtr.Write(Encoding.ASCII.GetBytes(_state.Extension));
wtr.Write(_state.DirHash);
wtr.Write(_state.Flags);
_offsetOffset = wtr.BaseStream.Position;
wtr.Write((ulong)0);
wtr.Write(_size);
wtr.Write(_rawSize);
wtr.Write(_state.Align);
}
public void WriteData(BinaryWriter wtr)
{
var pos = wtr.BaseStream.Position;
wtr.BaseStream.Seek(_offsetOffset, SeekOrigin.Begin);
wtr.Write((ulong)pos);
wtr.BaseStream.Position = pos;
wtr.Write(_data);
}
}
}

View File

@ -0,0 +1,507 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Alphaleonis.Win32.Filesystem;
using ICSharpCode.SharpZipLib.Zip;
using ICSharpCode.SharpZipLib.Zip.Compression;
using Microsoft.SqlServer.Server;
using File = Alphaleonis.Win32.Filesystem.File;
namespace Compression.BSA
{
public enum EntryType
{
GNRL,
DX10,
GNMF
}
interface IFileEntry : IFile
{
string FullPath { get; set; }
}
public class BA2Reader : IBSAReader
{
internal string _filename;
private Stream _stream;
internal BinaryReader _rdr;
internal uint _version;
internal string _headerMagic;
internal EntryType _type;
internal uint _numFiles;
internal ulong _nameTableOffset;
public bool UseATIFourCC { get; set; } = false;
public bool HasNameTable => _nameTableOffset > 0;
public BA2Reader(string filename) : this(File.OpenRead(filename))
{
_filename = filename;
}
public BA2Reader(Stream stream)
{
_stream = stream;
_rdr = new BinaryReader(_stream, Encoding.UTF7);
LoadHeaders();
}
public void LoadHeaders()
{
_headerMagic = Encoding.ASCII.GetString(_rdr.ReadBytes(4));
if (_headerMagic != "BTDX")
throw new InvalidDataException("Unknown header type: " + _headerMagic);
_version = _rdr.ReadUInt32();
string fourcc = Encoding.ASCII.GetString(_rdr.ReadBytes(4));
if (Enum.TryParse(fourcc, out EntryType entryType))
{
_type = entryType;
}
else
{
throw new InvalidDataException($"Can't parse entry types of {fourcc}");
}
_numFiles = _rdr.ReadUInt32();
_nameTableOffset = _rdr.ReadUInt64();
var files = new List<IFileEntry>();
for (var idx = 0; idx < _numFiles; idx += 1)
{
switch (_type)
{
case EntryType.GNRL:
files.Add(new BA2FileEntry(this, idx));
break;
case EntryType.DX10:
files.Add(new BA2DX10Entry(this, idx));
break;
case EntryType.GNMF:
break;
}
}
if (HasNameTable)
{
_rdr.BaseStream.Seek((long) _nameTableOffset, SeekOrigin.Begin);
foreach (var file in files)
file.FullPath = Encoding.UTF7.GetString(_rdr.ReadBytes(_rdr.ReadInt16()));
}
Files = files;
}
public void Dispose()
{
_stream?.Dispose();
_rdr?.Dispose();
}
public IEnumerable<IFile> Files { get; private set; }
public ArchiveStateObject State => new BA2StateObject(this);
}
public class BA2StateObject : ArchiveStateObject
{
public BA2StateObject()
{
}
public BA2StateObject(BA2Reader ba2Reader)
{
Version = ba2Reader._version;
HeaderMagic = ba2Reader._headerMagic;
Type = ba2Reader._type;
HasNameTable = ba2Reader.HasNameTable;
}
public bool HasNameTable { get; set; }
public EntryType Type { get; set; }
public string HeaderMagic { get; set; }
public uint Version { get; set; }
public override IBSABuilder MakeBuilder()
{
return new BA2Builder(this);
}
}
public class BA2DX10Entry : IFileEntry
{
internal uint _nameHash;
internal string _extension;
internal uint _dirHash;
internal byte _unk8;
internal byte _numChunks;
internal ushort _chunkHdrLen;
internal ushort _height;
internal ushort _width;
internal byte _numMips;
internal byte _format;
internal ushort _unk16;
internal List<BA2TextureChunk> _chunks;
private BA2Reader _bsa;
internal int _index;
public BA2DX10Entry(BA2Reader ba2Reader, int idx)
{
_bsa = ba2Reader;
var _rdr = ba2Reader._rdr;
_nameHash = _rdr.ReadUInt32();
FullPath = _nameHash.ToString("X");
_extension = Encoding.UTF7.GetString(_rdr.ReadBytes(4));
_dirHash = _rdr.ReadUInt32();
_unk8 = _rdr.ReadByte();
_numChunks = _rdr.ReadByte();
_chunkHdrLen = _rdr.ReadUInt16();
_height = _rdr.ReadUInt16();
_width = _rdr.ReadUInt16();
_numMips = _rdr.ReadByte();
_format = _rdr.ReadByte();
_unk16 = _rdr.ReadUInt16();
_index = idx;
_chunks = Enumerable.Range(0, _numChunks)
.Select(_ => new BA2TextureChunk(_rdr))
.ToList();
}
public string FullPath { get; set; }
public string Path => FullPath;
public uint Size => (uint)_chunks.Sum(f => f._fullSz) + HeaderSize + sizeof(uint);
public FileStateObject State => new BA2DX10EntryState(this);
public uint HeaderSize => DDS.HeaderSizeForFormat((DXGI_FORMAT)_format);
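// BA2 DX10 entries store only raw texture chunks; extraction synthesizes a DDS header from the
// stored width/height/mip/format metadata, then inflates (or copies) each chunk in order.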
public void CopyDataTo(Stream output)
{
var bw = new BinaryWriter(output);
WriteHeader(bw);
using (var fs = File.OpenRead(_bsa._filename))
using (var br = new BinaryReader(fs))
{
foreach (var chunk in _chunks)
{
byte[] full = new byte[chunk._fullSz];
var isCompressed = chunk._packSz != 0;
br.BaseStream.Seek((long)chunk._offset, SeekOrigin.Begin);
if (!isCompressed)
{
br.Read(full, 0, full.Length);
}
else
{
byte[] compressed = new byte[chunk._packSz];
br.Read(compressed, 0, compressed.Length);
var inflater = new Inflater();
inflater.SetInput(compressed);
inflater.Inflate(full);
}
bw.Write(full);
}
}
}
private void WriteHeader(BinaryWriter bw)
{
var ddsHeader = new DDS_HEADER();
ddsHeader.dwSize = ddsHeader.GetSize();
ddsHeader.dwHeaderFlags = DDS.DDS_HEADER_FLAGS_TEXTURE | DDS.DDS_HEADER_FLAGS_LINEARSIZE | DDS.DDS_HEADER_FLAGS_MIPMAP;
ddsHeader.dwHeight = _height;
ddsHeader.dwWidth = _width;
ddsHeader.dwMipMapCount = _numMips;
ddsHeader.PixelFormat.dwSize = ddsHeader.PixelFormat.GetSize();
ddsHeader.dwDepth = 1;
ddsHeader.dwSurfaceFlags = DDS.DDS_SURFACE_FLAGS_TEXTURE | DDS.DDS_SURFACE_FLAGS_MIPMAP;
switch ((DXGI_FORMAT)_format)
{
case DXGI_FORMAT.DXGI_FORMAT_BC1_UNORM:
ddsHeader.PixelFormat.dwFlags = DDS.DDS_FOURCC;
ddsHeader.PixelFormat.dwFourCC = DDS.MAKEFOURCC('D', 'X', 'T', '1');
ddsHeader.dwPitchOrLinearSize = (uint)(_width * _height / 2); // 4bpp
break;
case DXGI_FORMAT.DXGI_FORMAT_BC2_UNORM:
ddsHeader.PixelFormat.dwFlags = DDS.DDS_FOURCC;
ddsHeader.PixelFormat.dwFourCC = DDS.MAKEFOURCC('D', 'X', 'T', '3');
ddsHeader.dwPitchOrLinearSize = (uint)(_width * _height); // 8bpp
break;
case DXGI_FORMAT.DXGI_FORMAT_BC3_UNORM:
ddsHeader.PixelFormat.dwFlags = DDS.DDS_FOURCC;
ddsHeader.PixelFormat.dwFourCC = DDS.MAKEFOURCC('D', 'X', 'T', '5');
ddsHeader.dwPitchOrLinearSize = (uint)(_width * _height); // 8bpp
break;
case DXGI_FORMAT.DXGI_FORMAT_BC5_UNORM:
ddsHeader.PixelFormat.dwFlags = DDS.DDS_FOURCC;
if (_bsa.UseATIFourCC)
ddsHeader.PixelFormat.dwFourCC = DDS.MAKEFOURCC('A', 'T', 'I', '2'); // this is more correct but the only thing I have found that supports it is the nvidia photoshop plugin
else
ddsHeader.PixelFormat.dwFourCC = DDS.MAKEFOURCC('B', 'C', '5', 'U');
ddsHeader.dwPitchOrLinearSize = (uint)(_width * _height); // 8bpp
break;
case DXGI_FORMAT.DXGI_FORMAT_BC1_UNORM_SRGB:
ddsHeader.PixelFormat.dwFlags = DDS.DDS_FOURCC;
ddsHeader.PixelFormat.dwFourCC = DDS.MAKEFOURCC('D', 'X', '1', '0');
ddsHeader.dwPitchOrLinearSize = (uint)(_width * _height / 2); // 4bpp
break;
case DXGI_FORMAT.DXGI_FORMAT_BC3_UNORM_SRGB:
case DXGI_FORMAT.DXGI_FORMAT_BC6H_UF16:
case DXGI_FORMAT.DXGI_FORMAT_BC4_UNORM:
case DXGI_FORMAT.DXGI_FORMAT_BC5_SNORM:
case DXGI_FORMAT.DXGI_FORMAT_BC7_UNORM:
case DXGI_FORMAT.DXGI_FORMAT_BC7_UNORM_SRGB:
ddsHeader.PixelFormat.dwFlags = DDS.DDS_FOURCC;
ddsHeader.PixelFormat.dwFourCC = DDS.MAKEFOURCC('D', 'X', '1', '0');
ddsHeader.dwPitchOrLinearSize = (uint)(_width * _height); // 8bpp
break;
case DXGI_FORMAT.DXGI_FORMAT_R8G8B8A8_UNORM:
case DXGI_FORMAT.DXGI_FORMAT_R8G8B8A8_UNORM_SRGB:
ddsHeader.PixelFormat.dwFlags = DDS.DDS_RGBA;
ddsHeader.PixelFormat.dwRGBBitCount = 32;
ddsHeader.PixelFormat.dwRBitMask = 0x000000FF;
ddsHeader.PixelFormat.dwGBitMask = 0x0000FF00;
ddsHeader.PixelFormat.dwBBitMask = 0x00FF0000;
ddsHeader.PixelFormat.dwABitMask = 0xFF000000;
ddsHeader.dwPitchOrLinearSize = (uint)(_width * _height * 4); // 32bpp
break;
case DXGI_FORMAT.DXGI_FORMAT_B8G8R8A8_UNORM:
case DXGI_FORMAT.DXGI_FORMAT_B8G8R8X8_UNORM:
ddsHeader.PixelFormat.dwFlags = DDS.DDS_RGBA;
ddsHeader.PixelFormat.dwRGBBitCount = 32;
ddsHeader.PixelFormat.dwRBitMask = 0x00FF0000;
ddsHeader.PixelFormat.dwGBitMask = 0x0000FF00;
ddsHeader.PixelFormat.dwBBitMask = 0x000000FF;
ddsHeader.PixelFormat.dwABitMask = 0xFF000000;
ddsHeader.dwPitchOrLinearSize = (uint)(_width * _height * 4); // 32bpp
break;
case DXGI_FORMAT.DXGI_FORMAT_R8_UNORM:
ddsHeader.PixelFormat.dwFlags = DDS.DDS_RGB;
ddsHeader.PixelFormat.dwRGBBitCount = 8;
ddsHeader.PixelFormat.dwRBitMask = 0xFF;
ddsHeader.dwPitchOrLinearSize = (uint)(_width * _height); // 8bpp
break;
default:
throw new Exception("Unsupported DDS header format. File: " + this.FullPath);
}
bw.Write((uint)DDS.DDS_MAGIC);
ddsHeader.Write(bw);
switch ((DXGI_FORMAT)_format)
{
case DXGI_FORMAT.DXGI_FORMAT_BC1_UNORM_SRGB:
case DXGI_FORMAT.DXGI_FORMAT_BC3_UNORM_SRGB:
case DXGI_FORMAT.DXGI_FORMAT_BC4_UNORM:
case DXGI_FORMAT.DXGI_FORMAT_BC5_SNORM:
case DXGI_FORMAT.DXGI_FORMAT_BC6H_UF16:
case DXGI_FORMAT.DXGI_FORMAT_BC7_UNORM:
case DXGI_FORMAT.DXGI_FORMAT_BC7_UNORM_SRGB:
var dxt10 = new DDS_HEADER_DXT10()
{
dxgiFormat = _format,
resourceDimension = (uint)DXT10_RESOURCE_DIMENSION.DIMENSION_TEXTURE2D,
miscFlag = 0,
arraySize = 1,
miscFlags2 = DDS.DDS_ALPHA_MODE_UNKNOWN
};
dxt10.Write(bw);
break;
}
}
}
public class BA2DX10EntryState : FileStateObject
{
public BA2DX10EntryState() { }
public BA2DX10EntryState(BA2DX10Entry ba2Dx10Entry)
{
Path = ba2Dx10Entry.FullPath;
NameHash = ba2Dx10Entry._nameHash;
Extension = ba2Dx10Entry._extension;
DirHash = ba2Dx10Entry._dirHash;
Unk8 = ba2Dx10Entry._unk8;
ChunkHdrLen = ba2Dx10Entry._chunkHdrLen;
Height = ba2Dx10Entry._height;
Width = ba2Dx10Entry._width;
NumMips = ba2Dx10Entry._numMips;
PixelFormat = ba2Dx10Entry._format;
Unk16 = ba2Dx10Entry._unk16;
Index = ba2Dx10Entry._index;
Chunks = ba2Dx10Entry._chunks.Select(ch => new ChunkState(ch)).ToList();
}
public string Path { get; set; }
public List<ChunkState> Chunks { get; set; }
public ushort Unk16 { get; set; }
public byte PixelFormat { get; set; }
public byte NumMips { get; set; }
public ushort Width { get; set; }
public ushort Height { get; set; }
public ushort ChunkHdrLen { get; set; }
public byte Unk8 { get; set; }
public uint DirHash { get; set; }
public string Extension { get; set; }
public uint NameHash { get; set; }
}
public class ChunkState
{
public ChunkState() {}
public ChunkState(BA2TextureChunk ch)
{
FullSz = ch._fullSz;
StartMip = ch._startMip;
EndMip = ch._endMip;
Align = ch._align;
Compressed = ch._packSz != 0;
}
public bool Compressed { get; set; }
public uint Align { get; set; }
public ushort EndMip { get; set; }
public ushort StartMip { get; set; }
public uint FullSz { get; set; }
}
public class BA2TextureChunk
{
internal ulong _offset;
internal uint _packSz;
internal uint _fullSz;
internal ushort _startMip;
internal ushort _endMip;
internal uint _align;
public BA2TextureChunk(BinaryReader rdr)
{
_offset = rdr.ReadUInt64();
_packSz = rdr.ReadUInt32();
_fullSz = rdr.ReadUInt32();
_startMip = rdr.ReadUInt16();
_endMip = rdr.ReadUInt16();
_align = rdr.ReadUInt32();
}
}
public class BA2FileEntry : IFileEntry
{
internal uint _nameHash;
internal string _extension;
internal uint _dirHash;
internal uint _flags;
internal ulong _offset;
internal uint _size;
internal uint _realSize;
internal uint _align;
internal BA2Reader _bsa;
internal int _index;
public bool Compressed => _size != 0;
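// A packed size (_size) of 0 means the entry is stored uncompressed; _realSize always holds the uncompressed length.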
public BA2FileEntry(BA2Reader ba2Reader, int index)
{
_index = index;
_bsa = ba2Reader;
var _rdr = ba2Reader._rdr;
_nameHash = _rdr.ReadUInt32();
FullPath = _nameHash.ToString("X");
_extension = Encoding.UTF7.GetString(_rdr.ReadBytes(4));
_dirHash = _rdr.ReadUInt32();
_flags = _rdr.ReadUInt32();
_offset = _rdr.ReadUInt64();
_size = _rdr.ReadUInt32();
_realSize = _rdr.ReadUInt32();
_align = _rdr.ReadUInt32();
}
public string FullPath { get; set; }
public string Path => FullPath;
public uint Size => _realSize;
public FileStateObject State => new BA2FileEntryState(this);
public void CopyDataTo(Stream output)
{
using (var bw = new BinaryWriter(output))
using (var fs = File.OpenRead(_bsa._filename))
using (var br = new BinaryReader(fs))
{
br.BaseStream.Seek((long) _offset, SeekOrigin.Begin);
uint len = Compressed ? _size : _realSize;
var bytes = new byte[len];
br.Read(bytes, 0, (int) len);
if (!Compressed)
{
bw.Write(bytes);
}
else
{
var uncompressed = new byte[_realSize];
var inflater = new Inflater();
inflater.SetInput(bytes);
inflater.Inflate(uncompressed);
bw.Write(uncompressed);
}
}
}
}
public class BA2FileEntryState : FileStateObject
{
public BA2FileEntryState() { }
public BA2FileEntryState(BA2FileEntry ba2FileEntry)
{
NameHash = ba2FileEntry._nameHash;
DirHash = ba2FileEntry._dirHash;
Flags = ba2FileEntry._flags;
Align = ba2FileEntry._align;
Compressed = ba2FileEntry.Compressed;
Path = ba2FileEntry.FullPath;
Extension = ba2FileEntry._extension;
Index = ba2FileEntry._index;
}
public string Extension { get; set; }
public string Path { get; set; }
public bool Compressed { get; set; }
public uint Align { get; set; }
public uint Flags { get; set; }
public uint DirHash { get; set; }
public uint NameHash { get; set; }
}
}

View File

@ -11,7 +11,7 @@ using Path = Alphaleonis.Win32.Filesystem.Path;
namespace Compression.BSA
{
public class BSABuilder : IDisposable
public class BSABuilder : IDisposable, IBSABuilder
{
internal uint _archiveFlags;
internal uint _fileCount;
@ -32,6 +32,13 @@ namespace Compression.BSA
_offset = 0x24;
}
public BSABuilder(BSAStateObject bsaStateObject) : this()
{
_version = bsaStateObject.Version;
_fileFlags = bsaStateObject.FileFlags;
_archiveFlags = bsaStateObject.ArchiveFlags;
}
public IEnumerable<FileEntry> Files => _files;
public ArchiveFlags ArchiveFlags
@ -85,6 +92,18 @@ namespace Compression.BSA
return r;
}
public void AddFile(FileStateObject state, Stream src)
{
var ostate = (BSAFileStateObject) state;
var r = new FileEntry(this, ostate.Path, src, ostate.FlipCompression);
lock (this)
{
_files.Add(r);
}
}
public void Build(string outputName)
{
RegenFolderRecords();

View File

@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Compression.BSA
{
public static class BSADispatch
{
public static IBSAReader OpenRead(string filename)
{
string fourcc = "";
using (var file = File.OpenRead(filename))
{
fourcc = Encoding.ASCII.GetString(new BinaryReader(file).ReadBytes(4));
}
if (fourcc == "BSA\0")
return new BSAReader(filename);
if (fourcc == "BTDX")
return new BA2Reader(filename);
throw new InvalidDataException("Filename is not a .bsa or .ba2, magic " + fourcc);
}
}
}
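For context, a minimal usage sketch of the dispatcher above (the archive path is a placeholder, not part of this PR): OpenRead sniffs the 4-byte magic and hands back either a BSAReader or a BA2Reader behind the common IBSAReader interface.
using System;
using Compression.BSA;
class DispatchExample
{
    static void Main()
    {
        // Hypothetical path, for illustration only.
        using (var reader = BSADispatch.OpenRead(@"c:\tmp\SomeArchive.ba2"))
        {
            foreach (var file in reader.Files)
                Console.WriteLine($"{file.Path} ({file.Size} bytes)");
        }
    }
}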

View File

@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using K4os.Compression.LZ4.Streams;
using File = Alphaleonis.Win32.Filesystem.File;
@ -46,21 +47,21 @@ namespace Compression.BSA
Miscellaneous = 0x100
}
public class BSAReader : IDisposable
public class BSAReader : IDisposable, IBSAReader
{
private uint _archiveFlags;
private uint _fileCount;
private uint _fileFlags;
internal uint _archiveFlags;
internal uint _fileCount;
internal uint _fileFlags;
internal string _fileName;
private uint _folderCount;
private uint _folderRecordOffset;
internal uint _folderCount;
internal uint _folderRecordOffset;
private List<FolderRecord> _folders;
private string _magic;
internal string _magic;
private readonly BinaryReader _rdr;
private readonly Stream _stream;
private uint _totalFileNameLength;
private uint _totalFolderNameLength;
private uint _version;
internal uint _totalFileNameLength;
internal uint _totalFolderNameLength;
internal uint _version;
public BSAReader(string filename) : this(File.OpenRead(filename))
{
@ -74,7 +75,7 @@ namespace Compression.BSA
LoadHeaders();
}
public IEnumerable<FileRecord> Files
public IEnumerable<IFile> Files
{
get
{
@ -84,6 +85,8 @@ namespace Compression.BSA
}
}
public ArchiveStateObject State => new BSAStateObject(this);
public VersionType HeaderType => (VersionType) _version;
public ArchiveFlags ArchiveFlags => (ArchiveFlags) _archiveFlags;
@ -148,6 +151,29 @@ namespace Compression.BSA
}
}
public class BSAStateObject : ArchiveStateObject
{
public BSAStateObject() { }
public BSAStateObject(BSAReader bsaReader)
{
Magic = bsaReader._magic;
Version = bsaReader._version;
ArchiveFlags = bsaReader._archiveFlags;
FileFlags = bsaReader._fileFlags;
}
public override IBSABuilder MakeBuilder()
{
return new BSABuilder(this);
}
public string Magic { get; set; }
public uint Version { get; set; }
public uint ArchiveFlags { get; set; }
public uint FileFlags { get; set; }
}
public class FolderRecord
{
private readonly uint _fileCount;
@ -179,11 +205,12 @@ namespace Compression.BSA
if (bsa.HasFolderNames) Name = src.ReadStringLen(bsa.HeaderType);
_files = new List<FileRecord>();
for (var idx = 0; idx < _fileCount; idx += 1) _files.Add(new FileRecord(bsa, this, src));
for (var idx = 0; idx < _fileCount; idx += 1)
_files.Add(new FileRecord(bsa, this, src, idx));
}
}
public class FileRecord
public class FileRecord : IFile
{
private readonly BSAReader _bsa;
private readonly long _dataOffset;
@ -194,9 +221,11 @@ namespace Compression.BSA
private readonly uint _onDiskSize;
private readonly uint _originalSize;
private readonly uint _size;
internal readonly int _index;
public FileRecord(BSAReader bsa, FolderRecord folderRecord, BinaryReader src)
public FileRecord(BSAReader bsa, FolderRecord folderRecord, BinaryReader src, int index)
{
_index = index;
_bsa = bsa;
Hash = src.ReadUInt64();
var size = src.ReadUInt32();
@ -259,7 +288,8 @@ namespace Compression.BSA
}
}
public int Size => (int) _dataSize;
public uint Size => _dataSize;
public FileStateObject State => new BSAFileStateObject(this);
public ulong Hash { get; }
@ -311,4 +341,18 @@ namespace Compression.BSA
return ms.ToArray();
}
}
public class BSAFileStateObject : FileStateObject
{
public BSAFileStateObject() { }
public BSAFileStateObject(FileRecord fileRecord)
{
FlipCompression = fileRecord.FlipCompression;
Path = fileRecord.Path;
Index = fileRecord._index;
}
public bool FlipCompression { get; set; }
public string Path { get; set; }
}
}

View File

@ -22,6 +22,7 @@
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<PlatformTarget>x64</PlatformTarget>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
@ -30,6 +31,7 @@
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x64'">
<DebugSymbols>true</DebugSymbols>
@ -39,6 +41,7 @@
<PlatformTarget>x64</PlatformTarget>
<ErrorReport>prompt</ErrorReport>
<CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'">
<OutputPath>bin\x64\Release\</OutputPath>
@ -48,6 +51,7 @@
<PlatformTarget>x64</PlatformTarget>
<ErrorReport>prompt</ErrorReport>
<CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<Reference Include="AlphaFS, Version=2.2.0.0, Culture=neutral, PublicKeyToken=4d31a58f7d7ad5c9, processorArchitecture=MSIL">
@ -70,6 +74,7 @@
<HintPath>..\packages\System.Buffers.4.4.0\lib\netstandard2.0\System.Buffers.dll</HintPath>
</Reference>
<Reference Include="System.Core" />
<Reference Include="System.Drawing" />
<Reference Include="System.Memory, Version=4.0.1.1, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
<HintPath>..\packages\System.Memory.4.5.3\lib\netstandard2.0\System.Memory.dll</HintPath>
</Reference>
@ -89,8 +94,13 @@
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="BA2Builder.cs" />
<Compile Include="BA2Reader.cs" />
<Compile Include="BSABuilder.cs" />
<Compile Include="BSADispatch.cs" />
<Compile Include="BSAReader.cs" />
<Compile Include="DDS.cs" />
<Compile Include="IBSAReader.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Utils.cs" />
</ItemGroup>

Compression.BSA/DDS.cs Normal file
View File

@ -0,0 +1,223 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
namespace Compression.BSA
{
/*
* Copied from https://raw.githubusercontent.com/AlexxEG/BSA_Browser/master/Sharp.BSA.BA2/BA2Util/DDS.cs
* which is also GPL3 code. Modified slightly for Wabbajack
*
*/
/*
* Copied from dds.h. Includes (almost) only stuff I need in this project.
*
* Link: https://github.com/digitalutopia1/BA2Lib/blob/master/BA2Lib/dds.h
*
*/
public class DDS
{
public static uint HeaderSizeForFormat(DXGI_FORMAT fmt)
{
switch (fmt)
{
case DXGI_FORMAT.DXGI_FORMAT_BC1_UNORM_SRGB:
case DXGI_FORMAT.DXGI_FORMAT_BC3_UNORM_SRGB:
case DXGI_FORMAT.DXGI_FORMAT_BC4_UNORM:
case DXGI_FORMAT.DXGI_FORMAT_BC5_SNORM:
case DXGI_FORMAT.DXGI_FORMAT_BC6H_UF16:
case DXGI_FORMAT.DXGI_FORMAT_BC7_UNORM:
case DXGI_FORMAT.DXGI_FORMAT_BC7_UNORM_SRGB:
return DDS_HEADER_DXT10.Size + DDS_HEADER.Size;
default:
return DDS_HEADER.Size;
}
}
public const int DDS_MAGIC = 0x20534444; // "DDS "
public static uint MAKEFOURCC(char ch0, char ch1, char ch2, char ch3)
{
// This is alien to me...
return ((uint)(byte)(ch0) | ((uint)(byte)(ch1) << 8) | ((uint)(byte)(ch2) << 16 | ((uint)(byte)(ch3) << 24)));
}
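// Worked example of the packing above (little-endian byte order):
//   MAKEFOURCC('D','X','T','1') == 0x31545844, i.e. the bytes "DXT1" with 'D' (0x44) in the low byte.
// DDS_MAGIC (0x20534444) above is the same packing of the four bytes "DDS ".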
public const int DDS_FOURCC = 0x00000004; // DDPF_FOURCC
public const int DDS_RGB = 0x00000040; // DDPF_RGB
public const int DDS_RGBA = 0x00000041; // DDPF_RGB | DDPF_ALPHAPIXELS
public const int DDS_HEADER_FLAGS_TEXTURE = 0x00001007; // DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT
public const int DDS_HEADER_FLAGS_MIPMAP = 0x00020000; // DDSD_MIPMAPCOUNT
public const int DDS_HEADER_FLAGS_LINEARSIZE = 0x00080000; // DDSD_LINEARSIZE
public const int DDS_SURFACE_FLAGS_TEXTURE = 0x00001000; // DDSCAPS_TEXTURE
public const int DDS_SURFACE_FLAGS_MIPMAP = 0x00400008; // DDSCAPS_COMPLEX | DDSCAPS_MIPMAP
public const int DDS_ALPHA_MODE_UNKNOWN = 0x0;
}
#region dxgiformat.h
public enum DXGI_FORMAT
{
DXGI_FORMAT_R8G8B8A8_UNORM = 28,
DXGI_FORMAT_R8G8B8A8_UNORM_SRGB = 29,
DXGI_FORMAT_R8_UNORM = 61,
DXGI_FORMAT_BC1_UNORM = 71,
DXGI_FORMAT_BC1_UNORM_SRGB = 72,
DXGI_FORMAT_BC2_UNORM = 74,
DXGI_FORMAT_BC3_UNORM = 77,
DXGI_FORMAT_BC3_UNORM_SRGB = 78,
DXGI_FORMAT_BC4_UNORM = 80,
DXGI_FORMAT_BC5_UNORM = 83,
DXGI_FORMAT_BC5_SNORM = 84,
DXGI_FORMAT_B8G8R8A8_UNORM = 87,
DXGI_FORMAT_B8G8R8X8_UNORM = 88,
DXGI_FORMAT_BC6H_UF16 = 95,
DXGI_FORMAT_BC7_UNORM = 98,
DXGI_FORMAT_BC7_UNORM_SRGB = 99
}
#endregion
public enum DXT10_RESOURCE_DIMENSION
{
DIMENSION_TEXTURE1D = 2,
DIMENSION_TEXTURE2D = 3,
DIMENSION_TEXTURE3D = 4,
}
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct DDS_HEADER
{
public uint dwSize;
public uint dwHeaderFlags;
public uint dwHeight;
public uint dwWidth;
public uint dwPitchOrLinearSize;
public uint dwDepth; // only if DDS_HEADER_FLAGS_VOLUME is set in dwHeaderFlags
public uint dwMipMapCount;
public uint dwReserved1; // [11]
public DDS_PIXELFORMAT PixelFormat; // ddspf
public uint dwSurfaceFlags;
public uint dwCubemapFlags;
public uint dwReserved2; // [3]
public uint GetSize()
{
// 9 uint + DDS_PIXELFORMAT uints + 2 uint arrays with 14 uints total
// each uint 4 bytes each
return (9 * 4) + PixelFormat.GetSize() + (14 * 4);
}
public void Write(System.IO.BinaryWriter bw)
{
bw.Write(dwSize);
bw.Write(dwHeaderFlags);
bw.Write(dwHeight);
bw.Write(dwWidth);
bw.Write(dwPitchOrLinearSize);
bw.Write(dwDepth);
bw.Write(dwMipMapCount);
// Just write it multiple times, since it's never assigned a value anyway
for (int i = 0; i < 11; i++)
bw.Write(dwReserved1);
// DDS_PIXELFORMAT
bw.Write(PixelFormat.dwSize);
bw.Write(PixelFormat.dwFlags);
bw.Write(PixelFormat.dwFourCC);
bw.Write(PixelFormat.dwRGBBitCount);
bw.Write(PixelFormat.dwRBitMask);
bw.Write(PixelFormat.dwGBitMask);
bw.Write(PixelFormat.dwBBitMask);
bw.Write(PixelFormat.dwABitMask);
bw.Write(dwSurfaceFlags);
bw.Write(dwCubemapFlags);
// Just write it multiple times, since it's never assigned a value anyway
for (int i = 0; i < 3; i++)
bw.Write(dwReserved2);
}
public static uint Size
{
get
{
unsafe
{
return (uint)(sizeof(DDS_HEADER) + (sizeof(int) * 10) + (sizeof(int) * 2));
};
}
}
}
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct DDS_HEADER_DXT10
{
public uint dxgiFormat;
public uint resourceDimension;
public uint miscFlag;
public uint arraySize;
public uint miscFlags2;
public void Write(System.IO.BinaryWriter bw)
{
bw.Write(dxgiFormat);
bw.Write(resourceDimension);
bw.Write(miscFlag);
bw.Write(arraySize);
bw.Write(miscFlags2);
}
public static uint Size
{
get
{
unsafe
{
return (uint)sizeof(DDS_HEADER_DXT10);
};
}
}
}
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public unsafe struct DDS_PIXELFORMAT
{
public uint dwSize;
public uint dwFlags;
public uint dwFourCC;
public uint dwRGBBitCount;
public uint dwRBitMask;
public uint dwGBitMask;
public uint dwBBitMask;
public uint dwABitMask;
public DDS_PIXELFORMAT(uint size, uint flags, uint fourCC, uint rgbBitCount, uint rBitMask, uint gBitMask, uint bBitMask, uint aBitMask)
{
dwSize = size;
dwFlags = flags;
dwFourCC = fourCC;
dwRGBBitCount = rgbBitCount;
dwRBitMask = rBitMask;
dwGBitMask = gBitMask;
dwBBitMask = bBitMask;
dwABitMask = aBitMask;
}
public uint GetSize()
{
// 8 uints, each 4 bytes each
return 8 * 4;
}
}
}

View File

@ -0,0 +1,64 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Compression.BSA
{
public interface IBSAReader : IDisposable
{
/// <summary>
/// The files defined by the archive
/// </summary>
IEnumerable<IFile> Files { get; }
ArchiveStateObject State { get; }
}
public interface IBSABuilder : IDisposable
{
void AddFile(FileStateObject state, Stream src);
void Build(string filename);
}
public class ArchiveStateObject
{
public virtual IBSABuilder MakeBuilder()
{
throw new NotImplementedException();
}
}
public class FileStateObject
{
public int Index { get; set; }
public string Path { get; set; }
}
public interface IFile
{
/// <summary>
/// The path of the file inside the archive
/// </summary>
string Path { get; }
/// <summary>
/// The uncompressed file size
/// </summary>
uint Size { get; }
/// <summary>
/// Get the metadata for the file.
/// </summary>
FileStateObject State { get; }
/// <summary>
/// Copies this entry to the given stream. 100% thread safe, the .bsa will be opened multiple times
/// in order to maintain thread-safe access.
/// </summary>
/// <param name="output"></param>
void CopyDataTo(Stream output);
}
}
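To make the contract above concrete, here is a hedged round-trip sketch under these interfaces (srcArchive and dstArchive are illustrative paths, not values from this PR): the reader's ArchiveStateObject produces a matching builder, and each file's FileStateObject is handed back with its data so format-specific details survive the rebuild.
using System.IO;
using Compression.BSA;
class RoundTripExample
{
    // Rebuilds an archive from an existing one using only the shared interfaces.
    static void Repack(string srcArchive, string dstArchive)
    {
        using (var reader = BSADispatch.OpenRead(srcArchive))
        using (var builder = reader.State.MakeBuilder())
        {
            foreach (var file in reader.Files)
            {
                using (var ms = new MemoryStream())
                {
                    file.CopyDataTo(ms);
                    ms.Position = 0;
                    builder.AddFile(file.State, ms);
                }
            }
            builder.Build(dstArchive);
        }
    }
}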

View File

@ -51,7 +51,6 @@ namespace Compression.BSA
return GetEncoding(version).GetString(acc.ToArray());
}
/// <summary>
/// Returns bytes for a \0 terminated string
/// </summary>

View File

@ -17,7 +17,7 @@ namespace Wabbajack.Common
public static HashSet<string> SupportedArchives = new HashSet<string> {".zip", ".rar", ".7z", ".7zip", ".fomod", ".omod"};
public static HashSet<string> SupportedBSAs = new HashSet<string> {".bsa"};
public static HashSet<string> SupportedBSAs = new HashSet<string> {".bsa", ".ba2"};
public static HashSet<string> ConfigFileExtensions = new HashSet<string> {".json", ".ini", ".yml"};
public static HashSet<string> ESPFileExtensions = new HashSet<string>() { ".esp", ".esm", ".esl"};

View File

@ -5,7 +5,7 @@ using System.Reflection;
using Alphaleonis.Win32.Filesystem;
using Compression.BSA;
using ICSharpCode.SharpZipLib.GZip;
using OMODFramework;
//using OMODFramework;
namespace Wabbajack.Common
{
@ -35,7 +35,7 @@ namespace Wabbajack.Common
{
try
{
if (source.EndsWith(".bsa"))
if (Consts.SupportedBSAs.Any(b => source.ToLower().EndsWith(b)))
ExtractAllWithBSA(source, dest);
else if (source.EndsWith(".exe"))
ExtractAllWithInno(source, dest);
@ -53,19 +53,19 @@ namespace Wabbajack.Common
private static void ExtractAllWithOMOD(string source, string dest)
{
Utils.Log($"Extracting {Path.GetFileName(source)}");
/*Utils.Log($"Extracting {Path.GetFileName(source)}");
Framework f = new Framework();
f.SetTempDirectory(dest);
OMOD omod = new OMOD(source, ref f);
omod.ExtractDataFiles();
omod.ExtractPlugins();
omod.ExtractPlugins();*/
}
private static void ExtractAllWithBSA(string source, string dest)
{
try
{
using (var arch = new BSAReader(source))
using (var arch = BSADispatch.OpenRead(source))
{
arch.Files.PMap(f =>
{
@ -216,7 +216,8 @@ namespace Wabbajack.Common
/// <returns></returns>
public static bool CanExtract(string v)
{
return Consts.SupportedArchives.Contains(v) || v == ".bsa";
v = v.ToLower();
return Consts.SupportedArchives.Contains(v) || Consts.SupportedBSAs.Contains(v);
}
public class Entry

View File

@ -12,6 +12,10 @@ namespace Wabbajack
{
public App()
{
/*
Utils.Log($"Wabbajack Build - {ThisAssembly.Git.Sha}");
SetupHandlers();
var args = Environment.GetCommandLineArgs();
if (args.Length > 1)
{
@ -23,8 +27,19 @@ namespace Wabbajack
Environment.Exit(0);
}
Environment.Exit(1);
}
}*/
}
private void SetupHandlers()
{
AppDomain.CurrentDomain.UnhandledException += AppHandler;
}
private void AppHandler(object sender, UnhandledExceptionEventArgs e)
{
Utils.Log("Uncaught error:");
Utils.Log(((Exception)e.ExceptionObject).ExceptionToString());
}
}
}

View File

@ -407,6 +407,7 @@ namespace Wabbajack
group.PMap(entry =>
{
Info($"Patching {entry.To}");
Status($"Patching {entry.To}");
using (var origin = by_path[string.Join("|", entry.ArchiveHashPath.Skip(1))].OpenRead())
using (var output = new MemoryStream())
{
@ -431,10 +432,15 @@ namespace Wabbajack
var bsa_id = to.Split('\\')[1];
var bsa = InstallDirectives.OfType<CreateBSA>().First(b => b.TempID == bsa_id);
using (var a = new BSAReader(Path.Combine(MO2Folder, bsa.To)))
using (var a = BSADispatch.OpenRead(Path.Combine(MO2Folder, bsa.To)))
{
var file = a.Files.First(e => e.Path == Path.Combine(to.Split('\\').Skip(2).ToArray()));
return file.GetData();
var find = Path.Combine(to.Split('\\').Skip(2).ToArray());
var file = a.Files.First(e => e.Path.Replace('/', '\\') == find);
using (var ms = new MemoryStream())
{
file.CopyDataTo(ms);
return ms.ToArray();
}
}
}
@ -927,7 +933,7 @@ namespace Wabbajack
return source =>
{
if (!Consts.SupportedBSAs.Contains(Path.GetExtension(source.Path))) return null;
if (!Consts.SupportedBSAs.Contains(Path.GetExtension(source.Path).ToLower())) return null;
var default_include = false;
if (source.Path.StartsWith("mods"))
@ -959,20 +965,17 @@ namespace Wabbajack
;
CreateBSA directive;
using (var bsa = new BSAReader(source.AbsolutePath))
using (var bsa = BSADispatch.OpenRead(source.AbsolutePath))
{
directive = new CreateBSA
{
To = source.Path,
TempID = id,
Type = (uint)bsa.HeaderType,
FileFlags = (uint)bsa.FileFlags,
ArchiveFlags = (uint)bsa.ArchiveFlags
State = bsa.State,
FileStates = bsa.Files.Select(f => f.State).ToList()
};
}
;
return directive;
};
}

View File

@ -1,6 +1,7 @@
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using Compression.BSA;
using VFS;
using Wabbajack.Common;
@ -167,15 +168,10 @@ namespace Wabbajack
[Serializable]
public class CreateBSA : Directive
{
public string IsCompressed;
public bool ShareData;
public string TempID;
public uint Type;
public uint Version;
public uint FileFlags { get; set; }
public bool Compress { get; set; }
public uint ArchiveFlags { get; set; }
public ArchiveStateObject State { get; set; }
public List<FileStateObject> FileStates { get; set; }
}
[Serializable]

View File

@ -242,30 +242,21 @@ namespace Wabbajack
{
Status($"Building {bsa.To}");
var source_dir = Path.Combine(Outputfolder, Consts.BSACreationDir, bsa.TempID);
var source_files = Directory.EnumerateFiles(source_dir, "*", SearchOption.AllDirectories)
.Select(e => e.Substring(source_dir.Length + 1))
.ToList();
if (source_files.Count > 0)
using (var a = new BSABuilder())
using (var a = bsa.State.MakeBuilder())
{
bsa.FileStates.PMap(state =>
{
//a.Create(Path.Combine(Outputfolder, bsa.To), (bsa_archive_type_t)bsa.Type, entries);
a.HeaderType = (VersionType)bsa.Type;
a.FileFlags = (FileFlags)bsa.FileFlags;
a.ArchiveFlags = (ArchiveFlags)bsa.ArchiveFlags;
source_files.PMap(f =>
Status($"Adding {state.Path} to BSA");
using (var fs = File.OpenRead(Path.Combine(source_dir, state.Path)))
{
Status($"Adding {f} to BSA");
using (var fs = File.OpenRead(Path.Combine(source_dir, f)))
{
a.AddFile(f, fs);
}
});
a.AddFile(state, fs);
}
});
Info($"Writing {bsa.To}");
a.Build(Path.Combine(Outputfolder, bsa.To));
}
Info($"Writing {bsa.To}");
a.Build(Path.Combine(Outputfolder, bsa.To));
}
});

View File

@ -140,7 +140,7 @@ namespace Wabbajack.Validation
if (nexus_mod_permissions.TryGetValue(p.ArchiveHashPath[0], out var archive))
{
if (!(archive.permissions.CanExtractBSAs ?? true) &&
p.ArchiveHashPath.Skip(1).ButLast().Any(a => Consts.SupportedBSAs.Contains(Path.GetExtension(a))))
p.ArchiveHashPath.Skip(1).ButLast().Any(a => Consts.SupportedBSAs.Contains(Path.GetExtension(a).ToLower())))
{
ValidationErrors.Push($"{p.To} from {archive.archive.NexusURL} is set to disallow BSA Extraction");
}