diff --git a/Wabbajack.Downloaders.WabbajackCDN/WabbajackCDNDownloader.cs b/Wabbajack.Downloaders.WabbajackCDN/WabbajackCDNDownloader.cs
index 408941ae..95d265c3 100644
--- a/Wabbajack.Downloaders.WabbajackCDN/WabbajackCDNDownloader.cs
+++ b/Wabbajack.Downloaders.WabbajackCDN/WabbajackCDNDownloader.cs
@@ -80,33 +80,36 @@ public class WabbajackCDNDownloader : ADownloader, IUrlDownloader,
         var definition = (await GetDefinition(state, token))!;
         await using var fs = destination.Open(FileMode.Create, FileAccess.Write, FileShare.None);
 
-        await definition.Parts.PMapAll(async part =>
+        await definition.Parts.PMapAll(async part => 
         {
-            using var partJob = await _limiter.Begin(
-                $"Downloading {definition.MungedName} ({part.Index}/{definition.Size})",
-                part.Size, token);
-            var msg = MakeMessage(new Uri(state.Url + $"/parts/{part.Index}"));
-            using var response = await _client.SendAsync(msg, HttpCompletionOption.ResponseHeadersRead, token);
-            if (!response.IsSuccessStatusCode)
-                throw new InvalidDataException($"Bad response for part request for part {part.Index}");
-
-            var length = response.Content.Headers.ContentLength;
-            if (length != part.Size)
-                throw new InvalidDataException(
-                    $"Bad part size, expected {part.Size} got {length} for part {part.Index}");
-
-            await using var data = await response.Content.ReadAsStreamAsync(token);
-
-            var ms = new MemoryStream();
-            var hash = await data.HashingCopy(ms, token, partJob);
-            ms.Position = 0;
-            if (hash != part.Hash)
+            return await CircuitBreaker.WithAutoRetryAllAsync<(MemoryStream, PartDefinition)>(_logger, async () =>
             {
-                throw new Exception(
-                    $"Invalid part hash {part.Index} got {hash} instead of {part.Hash} for {definition.MungedName}");
-            }
+                using var partJob = await _limiter.Begin(
+                    $"Downloading {definition.MungedName} ({part.Index}/{definition.Size})",
+                    part.Size, token);
+                var msg = MakeMessage(new Uri(state.Url + $"/parts/{part.Index}"));
+                using var response = await _client.SendAsync(msg, HttpCompletionOption.ResponseHeadersRead, token);
+                if (!response.IsSuccessStatusCode)
+                    throw new InvalidDataException($"Bad response for part request for part {part.Index}");
 
-            return (ms, part);
+                var length = response.Content.Headers.ContentLength;
+                if (length != part.Size)
+                    throw new InvalidDataException(
+                        $"Bad part size, expected {part.Size} got {length} for part {part.Index}");
+
+                await using var data = await response.Content.ReadAsStreamAsync(token);
+
+                var ms = new MemoryStream();
+                var hash = await data.HashingCopy(ms, token, partJob);
+                ms.Position = 0;
+                if (hash != part.Hash)
+                {
+                    throw new Exception(
+                        $"Invalid part hash {part.Index} got {hash} instead of {part.Hash} for {definition.MungedName}");
+                }
+
+                return (ms, part);
+            });
         }).Do(async rec =>
diff --git a/Wabbajack.Server/DataModels/AuthorFiles.cs b/Wabbajack.Server/DataModels/AuthorFiles.cs
index 28eca7be..c9ff706b 100644
--- a/Wabbajack.Server/DataModels/AuthorFiles.cs
+++ b/Wabbajack.Server/DataModels/AuthorFiles.cs
@@ -29,6 +29,7 @@ public class AuthorFiles
     private HashSet _mangledNames;
     private readonly RecyclableMemoryStreamManager _streamPool;
    private readonly HttpClient _httpClient;
+    private readonly AbsolutePath _cacheFile;
 
     private Uri _baseUri => new($"https://r2.wabbajack.org/");
@@ -43,25 +44,25 @@ public class AuthorFiles
         _bucketName = settings.AuthoredFilesS3.BucketName;
         _ = PrimeCache();
         _streamPool = new RecyclableMemoryStreamManager();
+        _cacheFile = _settings.AuthoredFilesS3.BucketCacheFile.ToAbsolutePath();
     }
 
     private async Task PrimeCache()
     {
         try
         {
-            var cacheFile = _settings.AuthoredFilesS3.BucketCacheFile.ToAbsolutePath();
-            if (!cacheFile.FileExists())
+            if (!_cacheFile.FileExists())
             {
                 var allObjects = await AllObjects().ToArrayAsync();
                 foreach (var obje in allObjects)
                 {
                     _allObjects.TryAdd(obje.Key.ToRelativePath(), obje.LastModified.ToFileTimeUtc());
                 }
-                SaveBucketCacheFile(cacheFile);
+                SaveBucketCacheFile(_cacheFile);
             }
             else
             {
-                LoadBucketCacheFile(cacheFile);
+                LoadBucketCacheFile(_cacheFile);
             }
@@ -183,7 +184,8 @@ public class AuthorFiles
             BucketName = _bucketName,
             Key = mungedName.ToRelativePath().Combine("parts", part.ToString()).ToString().Replace("\\", "/"),
             InputStream = ms,
-            DisablePayloadSigning = true
+            DisablePayloadSigning = true,
+            ContentType = "application/octet-stream"
         });
     }
 
@@ -201,7 +203,8 @@ public class AuthorFiles
             BucketName = _bucketName,
             Key = definition.MungedName.ToRelativePath().Combine("definition.json.gz").ToString().Replace("\\", "/"),
             InputStream = ms,
-            DisablePayloadSigning = true
+            DisablePayloadSigning = true,
+            ContentType = "application/octet-stream"
         });
         _fileCache.TryAdd(definition.MungedName, new FileDefinitionMetadata
         {
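
Note on the WabbajackCDNDownloader change: each per-part download is now wrapped in CircuitBreaker.WithAutoRetryAllAsync, so a transient HTTP error, short read, or hash mismatch on a single part can be retried instead of failing the whole download. The sketch below illustrates the retry-with-backoff idea behind that wrapper; RetryAsync is a hypothetical stand-in for illustration only, not Wabbajack's actual CircuitBreaker implementation.

    using System;
    using System.Threading.Tasks;

    public static class Retry
    {
        // Hypothetical stand-in for a retry wrapper: run the delegate, retry on any
        // exception up to maxRetries times, and double the delay between attempts.
        public static async Task<T> RetryAsync<T>(Func<Task<T>> action, int maxRetries = 3)
        {
            var delay = TimeSpan.FromSeconds(1);
            for (var attempt = 1; ; attempt++)
            {
                try
                {
                    return await action();
                }
                catch (Exception) when (attempt <= maxRetries)
                {
                    await Task.Delay(delay);
                    delay *= 2; // exponential backoff before the next attempt
                }
            }
        }
    }

In the diff above, the part download body is the delegate, and the (MemoryStream, PartDefinition) tuple is returned on success, presumably so the subsequent .Do(...) step can write the buffered part to the destination stream.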
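
Note on the AuthorFiles change: the bucket-cache path is resolved once into the _cacheFile field, and both R2 uploads now set an explicit Content-Type. A minimal sketch of such an upload with the AWS SDK for .NET follows; the bucket name, key, and method name are placeholders, not values from the diff.

    using System.IO;
    using System.Threading.Tasks;
    using Amazon.S3;
    using Amazon.S3.Model;

    public static class UploadSketch
    {
        // Upload a stream to an S3-compatible bucket (e.g. Cloudflare R2) with an
        // explicit Content-Type, mirroring the PutObjectRequest fields used above.
        public static async Task UploadAsync(IAmazonS3 s3, Stream ms)
        {
            await s3.PutObjectAsync(new PutObjectRequest
            {
                BucketName = "example-bucket",           // placeholder bucket name
                Key = "munged-name/parts/0",             // placeholder object key
                InputStream = ms,
                DisablePayloadSigning = true,            // as in the requests above
                ContentType = "application/octet-stream"
            });
        }
    }

Setting ContentType explicitly makes the stored objects' Content-Type predictable rather than leaving it to SDK or bucket defaults.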