Can read/write two SSE BSAs so far; working out the bugs now.

This commit is contained in:
Timothy Baldridge 2019-07-28 22:52:04 -06:00
parent 099021d890
commit 8facc9a1f4
4 changed files with 201 additions and 27 deletions

View File

@ -12,7 +12,7 @@ namespace Compression.BSA.Test
const string TestDir = "c:\\Mod Organizer 2\\mods";
static void Main(string[] args)
{
foreach (var bsa in Directory.EnumerateFiles(TestDir, "*.bsa", SearchOption.AllDirectories))
foreach (var bsa in Directory.EnumerateFiles(TestDir, "*.bsa", SearchOption.AllDirectories).Skip(2))
{
Console.WriteLine($"From {bsa}");
using (var a = new BSAReader(bsa))
@ -35,22 +35,84 @@ namespace Compression.BSA.Test
w.FileFlags = a.FileFlags;
w.HeaderType = a.HeaderType;
foreach (var file in a.Files)
Parallel.ForEach(a.Files, file =>
{
var abs_path = Path.Combine("c:\\tmp\\out", file.Path);
using (var str = File.OpenRead(abs_path))
w.AddFile(file.Path, str);
}
w.RegenFolderRecords();
});
w.Build("c:\\tmp\\built.bsa");
}
break;
using (var b = new BSAReader("c:\\tmp\\built.bsa"))
{
Console.WriteLine($"Performing A/B tests on {bsa}");
Equal((uint)a.ArchiveFlags, (uint)b.ArchiveFlags);
Equal((uint)a.FileFlags, (uint)b.FileFlags);
// Check same number of files
Equal(a.Files.Count(), b.Files.Count());
int idx = 0;
foreach (var pair in Enumerable.Zip(a.Files, b.Files, (ai, bi) => (ai, bi)))
{
idx ++;
Console.WriteLine($" - {pair.ai.Path}");
Equal(pair.ai.Path, pair.bi.Path);
Equal(pair.ai.Compressed, pair.bi.Compressed);
Equal(pair.ai.Size, pair.bi.Size);
//Equal(pair.ai.GetData(), pair.bi.GetData());
}
}
//break;
}
}
}
// Asserts two uints are equal; throws with both values on mismatch.
public static void Equal(uint a, uint b)
{
    if (a != b)
        throw new InvalidDataException($"{a} != {b}");
}
// Asserts two ints are equal; throws with both values on mismatch.
public static void Equal(int a, int b)
{
    if (a != b)
        throw new InvalidDataException($"{a} != {b}");
}
// Asserts two strings are equal (ordinal); throws with both values on mismatch.
public static void Equal(string a, string b)
{
    if (a != b)
        throw new InvalidDataException($"{a} != {b}");
}
// Asserts two bools are equal; throws with both values on mismatch.
public static void Equal(bool a, bool b)
{
    if (a != b)
        throw new InvalidDataException($"{a} != {b}");
}
/// <summary>
/// Asserts two byte arrays have identical length and contents.
/// Throws InvalidDataException naming the mismatching lengths or the
/// first differing index, to make A/B failures diagnosable.
/// </summary>
public static void Equal(byte[] a, byte[] b)
{
    if (a.Length != b.Length)
    {
        // Include the actual sizes — the old message gave no detail.
        throw new InvalidDataException($"Byte array sizes are not equal ({a.Length} != {b.Length})");
    }
    for (var idx = 0; idx < a.Length; idx ++)
    {
        if (a[idx] != b[idx])
            throw new InvalidDataException($"Byte array contents not equal at {idx}");
    }
}
}
}

View File

@ -1,4 +1,5 @@
using K4os.Compression.LZ4.Streams;
using K4os.Compression.LZ4;
using K4os.Compression.LZ4.Streams;
using System;
using System.Collections.Generic;
using System.IO;
@ -108,8 +109,17 @@ namespace Compression.BSA
}
}
// Flag bit 0x100 of the archive flags marks archives that embed a
// name blob ahead of each file's data block.
public bool HasNameBlobs => (_archiveFlags & 0x100) > 0;
public void Build(string outputName)
{
RegenFolderRecords();
if (File.Exists(outputName)) File.Delete(outputName);
using (var fs = File.OpenWrite(outputName))
@ -128,13 +138,32 @@ namespace Compression.BSA
wtr.Write(_totalFileNameLength); // totalFileNameLength
wtr.Write(_fileFlags);
uint idx = 0;
foreach (var folder in _folders)
{
folder.WriteFolderRecord(wtr, idx);
idx += 1;
folder.WriteFolderRecord(wtr);
}
foreach(var folder in _folders)
{
if (HasFolderNames)
wtr.Write(folder._nameBytes);
foreach (var file in folder._files)
{
file.WriteFileRecord(wtr);
}
}
foreach(var file in _files)
{
wtr.Write(file._nameBytes);
}
foreach(var file in _files)
{
file.WriteData(wtr);
}
}
}
@ -144,6 +173,15 @@ namespace Compression.BSA
.Select(f => new FolderRecordBuilder(this, f.Key, f.ToList()))
.OrderBy(f => f._hash)
.ToList();
foreach (var folder in _folders)
foreach (var file in folder._files)
file._folder = folder;
_files = (from folder in _folders
from file in folder._files
orderby folder._hash, file._hash
select file).ToList();
}
public void Dispose()
@ -191,7 +229,7 @@ namespace Compression.BSA
public FolderRecordBuilder(BSABuilder bsa, string folderName, IEnumerable<FileEntry> files)
{
_files = files;
_files = files.OrderBy(f => f._hash);
_bsa = bsa;
_hash = folderName.GetBSAHash();
_fileCount = (uint)files.Count();
@ -199,8 +237,9 @@ namespace Compression.BSA
_recordSize = sizeof(ulong) + sizeof(uint) + sizeof(uint);
}
public void WriteFolderRecord(BinaryWriter wtr, uint idx)
public void WriteFolderRecord(BinaryWriter wtr)
{
var idx = _bsa._folders.IndexOf(this);
_offset = (ulong)wtr.BaseStream.Position;
_offset += (ulong)_bsa._folders.Skip((int)idx).Select(f => (long)f.SelfSize).Sum();
_offset += _bsa._totalFileNameLength;
@ -216,21 +255,14 @@ namespace Compression.BSA
}
}
// Writes this folder's name (when the archive carries folder names)
// followed by the file records for every file in the folder.
public void WriteFileRecordBlocks(BinaryWriter wtr)
{
    if (!_bsa.HasFolderNames)
        return;
    wtr.Write(_nameBytes);
    foreach (var file in _files)
        file.WriteFileRecord(wtr);
}
}
public class FileEntry
{
internal FolderRecordBuilder _folder;
internal BSABuilder _bsa;
internal string _path;
internal string _name;
internal string _filenameSource;
internal Stream _bytesSource;
internal bool _flipCompression;
@ -240,13 +272,15 @@ namespace Compression.BSA
internal byte[] _pathBytes;
internal byte[] _rawData;
internal int _originalSize;
private long _offsetOffset;
public FileEntry(BSABuilder bsa, string path, Stream src, bool flipCompression)
{
_bsa = bsa;
_path = path.ToLowerInvariant();
_hash = _path.GetBSAHash();
_nameBytes = System.IO.Path.GetFileName(_path).ToTermString();
_name = System.IO.Path.GetFileName(_path);
_hash = _name.GetBSAHash();
_nameBytes = _name.ToTermString();
_pathBytes = _path.ToTermString();
_flipCompression = flipCompression;
@ -282,8 +316,6 @@ namespace Compression.BSA
}
}
public string Path
{
get
@ -305,8 +337,42 @@ namespace Compression.BSA
}
// Emits this file's record: name hash, size word (bit 30 set when the
// archive-level compression default is flipped for this file), and a
// 4-byte offset placeholder that WriteData later patches in place.
internal void WriteFileRecord(BinaryWriter wtr)
{
    wtr.Write(_hash);
    var sizeWord = (uint)_rawData.Length;
    if (_flipCompression)
        sizeWord |= 0x1u << 30;
    wtr.Write(sizeWord);
    // Remember where the offset placeholder lives so WriteData can patch it.
    _offsetOffset = wtr.BaseStream.Position;
    wtr.Write((uint)0xDEADBEEF);
}
/// <summary>
/// Writes this file's data block at the current stream position and
/// back-patches the 4-byte offset placeholder that WriteFileRecord left
/// at _offsetOffset. The block is: optional name blob (when the archive
/// flags request it), then for compressed entries the original
/// (uncompressed) size, then the raw payload bytes.
/// </summary>
internal void WriteData(BinaryWriter wtr)
{
    uint offset = (uint)wtr.BaseStream.Position;
    // Patch the placeholder written by WriteFileRecord, then return here.
    wtr.BaseStream.Position = _offsetOffset;
    wtr.Write(offset);
    wtr.BaseStream.Position = offset;

    // The name blob is written identically for both branches; the old
    // code duplicated this write in each arm of the if/else.
    if (_bsa.HasNameBlobs)
        wtr.Write(_path.ToBSString());

    if (Compressed)
    {
        // Compressed entries are prefixed with the uncompressed size.
        wtr.Write((uint)_originalSize);
    }
    wtr.Write(_rawData);
}
}
}

View File

@ -322,9 +322,28 @@ namespace Compression.BSA
}
}
else
{
string _name;
int file_size = _size;
if (_bsa.HasNameBlobs)
{
var name_size = rdr.ReadByte();
file_size -= name_size + 1;
rdr.BaseStream.Position = _offset + 1 + name_size;
}
rdr.BaseStream.CopyToLimit(output, Size);
}
}
}
// Materializes this entry's (decompressed) contents as a byte array
// by copying through an in-memory stream.
public byte[] GetData()
{
    using (var buffer = new MemoryStream())
    {
        CopyDataTo(buffer);
        return buffer.ToArray();
    }
}
}
}

View File

@ -41,7 +41,22 @@ namespace Compression.BSA
var b = Windows1251.GetBytes(val);
var b2 = new byte[b.Length + 2];
b.CopyTo(b2, 1);
b[0] = (byte)b.Length;
b2[0] = (byte)(b.Length + 1);
return b2;
}
/// <summary>
/// Returns bytes for unterminated string with a count at the start
/// </summary>
/// <param name="val"></param>
/// <returns></returns>
/// <summary>
/// Returns bytes for an unterminated Windows-1251 string prefixed with a
/// one-byte length count.
/// </summary>
/// <param name="val">String to encode.</param>
/// <returns>Length-prefixed, unterminated byte encoding.</returns>
/// <exception cref="InvalidDataException">
/// The encoded string exceeds 255 bytes and cannot fit the one-byte
/// length prefix (the old code silently truncated the count, corrupting
/// the archive).
/// </exception>
public static byte[] ToBSString(this string val)
{
    var b = Windows1251.GetBytes(val);
    if (b.Length > byte.MaxValue)
        throw new InvalidDataException($"String too long for BSA format: {val}");
    var b2 = new byte[b.Length + 1];
    b.CopyTo(b2, 1);
    b2[0] = (byte)b.Length;
    return b2;
}
@ -107,6 +122,18 @@ namespace Compression.BSA
return (((ulong)(hash2 + hash3)) << 32) + hash1;
}
/// <summary>
/// Copies exactly <paramref name="limit"/> bytes from one stream to another.
/// </summary>
/// <param name="frm">Source stream, read from its current position.</param>
/// <param name="tw">Destination stream.</param>
/// <param name="limit">Number of bytes to copy.</param>
/// <exception cref="EndOfStreamException">
/// The source ended before <paramref name="limit"/> bytes were read. The
/// old code looped forever here: Stream.Read returns 0 at EOF, so limit
/// never decreased.
/// </exception>
public static void CopyToLimit(this Stream frm, Stream tw, int limit)
{
    byte[] buff = new byte[1024];
    while (limit > 0)
    {
        int to_read = Math.Min(buff.Length, limit);
        int read = frm.Read(buff, 0, to_read);
        if (read == 0)
            throw new EndOfStreamException($"Stream ended with {limit} bytes still expected");
        tw.Write(buff, 0, read);
        limit -= read;
    }
}
}
}