Mirror of https://github.com/wabbajack-tools/wabbajack.git (synced 2024-08-30 18:42:17 +00:00)
Make sure to set the content type on uploaded files

Commit 8beb0b6288, parent e6f5053a48
@@ -80,33 +80,36 @@ public class WabbajackCDNDownloader : ADownloader<WabbajackCDN>, IUrlDownloader,
         var definition = (await GetDefinition(state, token))!;
         await using var fs = destination.Open(FileMode.Create, FileAccess.Write, FileShare.None);

-        await definition.Parts.PMapAll(async part =>
+        await definition.Parts.PMapAll<PartDefinition, (MemoryStream, PartDefinition)>(async part =>
         {
-            using var partJob = await _limiter.Begin(
-                $"Downloading {definition.MungedName} ({part.Index}/{definition.Size})",
-                part.Size, token);
-            var msg = MakeMessage(new Uri(state.Url + $"/parts/{part.Index}"));
-            using var response = await _client.SendAsync(msg, HttpCompletionOption.ResponseHeadersRead, token);
-            if (!response.IsSuccessStatusCode)
-                throw new InvalidDataException($"Bad response for part request for part {part.Index}");
-
-            var length = response.Content.Headers.ContentLength;
-            if (length != part.Size)
-                throw new InvalidDataException(
-                    $"Bad part size, expected {part.Size} got {length} for part {part.Index}");
-
-            await using var data = await response.Content.ReadAsStreamAsync(token);
-
-            var ms = new MemoryStream();
-            var hash = await data.HashingCopy(ms, token, partJob);
-            ms.Position = 0;
-            if (hash != part.Hash)
-            {
-                throw new Exception(
-                    $"Invalid part hash {part.Index} got {hash} instead of {part.Hash} for {definition.MungedName}");
-            }
-
-            return (ms, part);
+            return await CircuitBreaker.WithAutoRetryAllAsync<(MemoryStream, PartDefinition)>(_logger, async () =>
+            {
+                using var partJob = await _limiter.Begin(
+                    $"Downloading {definition.MungedName} ({part.Index}/{definition.Size})",
+                    part.Size, token);
+                var msg = MakeMessage(new Uri(state.Url + $"/parts/{part.Index}"));
+                using var response = await _client.SendAsync(msg, HttpCompletionOption.ResponseHeadersRead, token);
+                if (!response.IsSuccessStatusCode)
+                    throw new InvalidDataException($"Bad response for part request for part {part.Index}");
+
+                var length = response.Content.Headers.ContentLength;
+                if (length != part.Size)
+                    throw new InvalidDataException(
+                        $"Bad part size, expected {part.Size} got {length} for part {part.Index}");
+
+                await using var data = await response.Content.ReadAsStreamAsync(token);
+
+                var ms = new MemoryStream();
+                var hash = await data.HashingCopy(ms, token, partJob);
+                ms.Position = 0;
+                if (hash != part.Hash)
+                {
+                    throw new Exception(
+                        $"Invalid part hash {part.Index} got {hash} instead of {part.Hash} for {definition.MungedName}");
+                }
+
+                return (ms, part);
+            });
         }).Do(async rec =>
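Besides the upload change, this hunk types the PMapAll call and wraps each part download in CircuitBreaker.WithAutoRetryAllAsync, so a transient HTTP failure or a bad part hash is retried instead of failing the whole file. The diff only shows the call site; purely as an illustrative sketch (not Wabbajack's actual CircuitBreaker, and with maxRetries and the delay chosen arbitrarily), a retry-all helper of that shape could look like:

using System;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;

public static class RetrySketch
{
    // Hypothetical stand-in for CircuitBreaker.WithAutoRetryAllAsync: run the body, and on any
    // exception log it and try again after a short delay, rethrowing once the attempts run out.
    public static async Task<T> WithAutoRetryAllAsync<T>(ILogger logger, Func<Task<T>> body,
        int maxRetries = 5)
    {
        for (var attempt = 1; ; attempt++)
        {
            try
            {
                return await body();
            }
            catch (Exception ex) when (attempt < maxRetries)
            {
                logger.LogWarning(ex, "Attempt {Attempt} failed, retrying", attempt);
                await Task.Delay(TimeSpan.FromSeconds(attempt));
            }
        }
    }
}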
@@ -29,6 +29,7 @@ public class AuthorFiles
     private HashSet<RelativePath> _mangledNames;
     private readonly RecyclableMemoryStreamManager _streamPool;
     private readonly HttpClient _httpClient;
+    private readonly AbsolutePath _cacheFile;

     private Uri _baseUri => new($"https://r2.wabbajack.org/");

@@ -43,25 +44,25 @@ public class AuthorFiles
         _bucketName = settings.AuthoredFilesS3.BucketName;
         _ = PrimeCache();
         _streamPool = new RecyclableMemoryStreamManager();
+        _cacheFile = _settings.AuthoredFilesS3.BucketCacheFile.ToAbsolutePath();
     }

     private async Task PrimeCache()
     {
         try
         {
-            var cacheFile = _settings.AuthoredFilesS3.BucketCacheFile.ToAbsolutePath();
-            if (!cacheFile.FileExists())
+            if (!_cacheFile.FileExists())
             {
                 var allObjects = await AllObjects().ToArrayAsync();
                 foreach (var obje in allObjects)
                 {
                     _allObjects.TryAdd(obje.Key.ToRelativePath(), obje.LastModified.ToFileTimeUtc());
                 }
-                SaveBucketCacheFile(cacheFile);
+                SaveBucketCacheFile(_cacheFile);
             }
             else
             {
-                LoadBucketCacheFile(cacheFile);
+                LoadBucketCacheFile(_cacheFile);
             }
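In this hunk the cache-file path moves from a local variable in PrimeCache into the new _cacheFile field set in the constructor; the priming logic itself is unchanged: list the whole bucket once, remember each object's key and last-modified time, and persist that to a local file so later startups can skip the full listing. A minimal standalone sketch of that pattern, assuming a plain JSON cache file and the AWS SDK's ListObjectsV2 paging (client, bucketName and cachePath are placeholders, not Wabbajack's helpers):

using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
using System.Threading.Tasks;
using Amazon.S3;
using Amazon.S3.Model;

public class BucketCacheSketch
{
    private readonly ConcurrentDictionary<string, long> _allObjects = new();

    // Fill _allObjects either from a local cache file or from a full bucket listing.
    public async Task PrimeCache(IAmazonS3 client, string bucketName, string cachePath)
    {
        if (File.Exists(cachePath))
        {
            var cached = JsonSerializer.Deserialize<Dictionary<string, long>>(
                await File.ReadAllTextAsync(cachePath)) ?? new();
            foreach (var (key, modified) in cached)
                _allObjects.TryAdd(key, modified);
            return;
        }

        // First run: walk the whole bucket page by page.
        var request = new ListObjectsV2Request { BucketName = bucketName };
        ListObjectsV2Response response;
        do
        {
            response = await client.ListObjectsV2Async(request);
            foreach (var obj in response.S3Objects)
                _allObjects.TryAdd(obj.Key, obj.LastModified.ToFileTimeUtc());
            request.ContinuationToken = response.NextContinuationToken;
        } while (response.IsTruncated == true);

        await File.WriteAllTextAsync(cachePath,
            JsonSerializer.Serialize(new Dictionary<string, long>(_allObjects)));
    }
}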
@@ -183,7 +184,8 @@ public class AuthorFiles
             BucketName = _bucketName,
             Key = mungedName.ToRelativePath().Combine("parts", part.ToString()).ToString().Replace("\\", "/"),
             InputStream = ms,
-            DisablePayloadSigning = true
+            DisablePayloadSigning = true,
+            ContentType = "application/octet-stream"
         });
     }
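This is the change the commit message refers to: each uploaded part now carries an explicit ContentType. Without it, the stored object's Content-Type depends on whatever the SDK or the store defaults to, so the same part could later be served with an unexpected type. A small sketch of the same PutObjectRequest shape against the plain AWS SDK (client, bucket and key are placeholders):

using System.IO;
using System.Threading.Tasks;
using Amazon.S3;
using Amazon.S3.Model;

public static class UploadSketch
{
    // Upload a stream and pin the stored Content-Type explicitly, mirroring the diff above.
    public static async Task UploadPart(IAmazonS3 client, string bucket, string key, Stream ms)
    {
        await client.PutObjectAsync(new PutObjectRequest
        {
            BucketName = bucket,
            Key = key,
            InputStream = ms,
            DisablePayloadSigning = true,            // unsigned payload, as in the diff above
            ContentType = "application/octet-stream" // the explicit type this commit adds
        });
    }
}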
@@ -201,7 +203,8 @@ public class AuthorFiles
             BucketName = _bucketName,
             Key = definition.MungedName.ToRelativePath().Combine("definition.json.gz").ToString().Replace("\\", "/"),
             InputStream = ms,
-            DisablePayloadSigning = true
+            DisablePayloadSigning = true,
+            ContentType = "application/octet-stream"
         });
         _fileCache.TryAdd(definition.MungedName, new FileDefinitionMetadata
         {
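The gzipped definition upload gets the same treatment as the parts. If you want to confirm the stored type after an upload, a metadata (HEAD) request via the SDK reports it; again a sketch, with bucket and key as placeholders:

using System.Threading.Tasks;
using Amazon.S3;

public static class VerifySketch
{
    // Read back the object's metadata and return the Content-Type the store will serve it with.
    public static async Task<string> StoredContentType(IAmazonS3 client, string bucket, string key)
    {
        var metadata = await client.GetObjectMetadataAsync(bucket, key);
        return metadata.Headers.ContentType;
    }
}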