wabbajack/Wabbajack.Lib/Downloaders/HTTPDownloader.cs

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Wabbajack.Common;
using Wabbajack.Common.Exceptions;
using Wabbajack.Common.Serialization.Json;
using Wabbajack.Lib.Validation;
namespace Wabbajack.Lib.Downloaders
{
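    /// <summary>
    /// Downloader for plain direct-URL archives. The state is built from a meta INI's
    /// [General] directURL entry (plus optional directURLHeaders) and downloads are
    /// performed over HTTP with optional resume support.
    /// </summary>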
    public class HTTPDownloader : IDownloader, IUrlDownloader
    {
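        /// <summary>
        /// Builds a download state from a meta INI. Only the [General] directURL entry is
        /// required; quickMode is not used by this downloader.
        /// </summary>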
        public async Task<AbstractDownloadState?> GetDownloaderState(dynamic archiveINI, bool quickMode)
        {
            var url = archiveINI?.General?.directURL;
            return GetDownloaderState(url, archiveINI);
        }

        public AbstractDownloadState? GetDownloaderState(string uri)
        {
            return GetDownloaderState(uri, null);
        }

        public AbstractDownloadState? GetDownloaderState(string url, dynamic? archiveINI)
        {
            if (url != null)
            {
                var tmp = new State(url);
                if (archiveINI?.General?.directURLHeaders != null)
                {
                    tmp.Headers.AddRange(archiveINI?.General.directURLHeaders.Split('|'));
                }
                return tmp;
            }

            return null;
        }
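        // Plain HTTP downloads need no per-session setup (no login or API handshake),
        // so Prepare is a no-op.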
        public async Task Prepare()
        {
        }
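        /// <summary>
        /// Serializable download state for a direct URL: the URL itself plus any extra
        /// request headers ("Name: Value" strings) to send with the download.
        /// </summary>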
        [JsonName("HttpDownloader")]
        public class State : AbstractDownloadState, IUpgradingState
        {
            public string Url { get; }

            public List<string> Headers { get; } = new List<string>();

            [JsonIgnore]
            public Common.Http.Client? Client { get; set; }

            [JsonIgnore]
            public override object[] PrimaryKey => new object[] { Url };

            public State(string url)
            {
                Url = url;
            }
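            /// <summary>
            /// A direct URL is whitelisted when it starts with any allowed prefix.
            /// </summary>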
            public override bool IsWhitelisted(ServerWhitelist whitelist)
            {
                return whitelist.AllowedPrefixes.Any(p => Url.StartsWith(p));
            }
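            /// <summary>
            /// Downloads the archive to the destination; delegates to DoDownload with download: true.
            /// </summary>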
            public override Task<bool> Download(Archive a, AbsolutePath destination)
            {
                return DoDownload(a, destination, true);
            }
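            /// <summary>
            /// Performs the HTTP request. With download == true the response body is streamed to
            /// destination, resuming via Range requests when the connection drops and the server
            /// supports it; with download == false only the response headers are inspected (used by Verify).
            /// </summary>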
            public async Task<bool> DoDownload(Archive a, AbsolutePath destination, bool download)
            {
                if (download)
                {
                    destination.Parent.CreateDirectory();
                }

                using (var fs = download ? await destination.Create() : null)
                {
                    var client = Client ?? new Common.Http.Client();
                    client.Headers.Add(("User-Agent", Consts.UserAgent));

                    // Extra headers are stored as "Name: Value" strings; split on the first ':'.
                    foreach (var header in Headers)
                    {
                        var idx = header.IndexOf(':');
                        var k = header.Substring(0, idx);
                        var v = header.Substring(idx + 1);
                        client.Headers.Add((k, v));
                    }

                    long totalRead = 0;
                    var bufferSize = 1024 * 32;

                    Utils.Status($"Starting Download {a.Name ?? Url}", Percent.Zero);
                    var response = await client.GetAsync(Url);
                    TOP:

                    if (!response.IsSuccessStatusCode)
                        return false;

                    Stream stream;
                    try
                    {
                        stream = await response.Content.ReadAsStreamAsync();
                    }
                    catch (Exception ex)
                    {
                        Utils.Error(ex, $"While downloading {Url}");
                        return false;
                    }

                    // Default to the expected archive size (or 1 when unknown), preferring the
                    // server-reported Content-Length when it is present.
                    var headerVar = a.Size == 0 ? "1" : a.Size.ToString();
                    long header_content_size = 0;
                    if (response.Content.Headers.Contains("Content-Length"))
                    {
                        headerVar = response.Content.Headers.GetValues("Content-Length").FirstOrDefault();
                        if (headerVar != null)
                            long.TryParse(headerVar, out header_content_size);
                    }

                    // Verification mode: compare sizes (when both are known) without reading the body.
                    if (!download)
                    {
                        if (a.Size != 0 && header_content_size != 0)
                            return a.Size == header_content_size;
                        return true;
                    }
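                    // "Accept-Ranges: bytes" means an interrupted transfer can be resumed with a
                    // Range request instead of restarting from zero.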
                    var supportsResume = response.Headers.AcceptRanges.FirstOrDefault(f => f == "bytes") != null;
                    var contentSize = headerVar != null ? long.Parse(headerVar) : 1;

                    using (var webs = stream)
                    {
                        var buffer = new byte[bufferSize];
                        int readThisCycle = 0;

                        while (true)
                        {
                            int read = 0;
                            try
                            {
                                read = await webs.ReadAsync(buffer, 0, bufferSize);
                            }
                            catch (Exception)
                            {
                                if (readThisCycle == 0)
                                    throw;

                                if (totalRead < contentSize)
                                {
                                    if (supportsResume)
                                    {
                                        Utils.Log(
                                            $"Abort during download, trying to resume {Url} from {totalRead.ToFileSizeString()}");
                                        var msg = new HttpRequestMessage(HttpMethod.Get, Url);
                                        msg.Headers.Range = new RangeHeaderValue(totalRead, null);
                                        response.Dispose();
                                        response = await client.SendAsync(msg);
                                        goto TOP;
                                    }

                                    throw;
                                }

                                break;
                            }

                            readThisCycle += read;
                            if (read == 0) break;

                            Utils.Status($"Downloading {a.Name}", Percent.FactoryPutInRange(totalRead, contentSize));
                            fs!.Write(buffer, 0, read);
                            totalRead += read;
                        }
                    }

                    response.Dispose();
                    return true;
                }
            }
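            /// <summary>
            /// Verifies the archive by issuing the request without writing a file and comparing
            /// the reported Content-Length against the expected size.
            /// </summary>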
            public override async Task<bool> Verify(Archive a)
            {
                return await DoDownload(a, ((RelativePath)"").RelativeToEntryPoint(), false);
            }

            public override IDownloader GetDownloader()
            {
                return DownloadDispatcher.GetInstance<HTTPDownloader>();
            }

            public override string GetManifestURL(Archive a)
            {
                return Url;
            }
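            /// <summary>
            /// Emits the [General] block this state round-trips through GetDownloaderState, e.g.
            /// (URL and header values here are purely illustrative):
            ///     [General]
            ///     directURL=https://example.com/some-mod.7z
            ///     directURLHeaders=Referer: https://example.com|X-Api-Key: abc123
            /// </summary>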
            public override string[] GetMetaIni()
            {
                if (Headers.Count > 0)
                    return new[]
                    {
                        "[General]",
                        $"directURL={Url}",
                        $"directURLHeaders={string.Join("|", Headers)}"
                    };
                else
                    return new[] {"[General]", $"directURL={Url}"};
            }
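            /// <summary>
            /// Checks for an upgraded file by re-downloading the URL to a temp file. Returns the
            /// new archive only when the content actually changed and both old and new files are
            /// under ~2.5 GB; otherwise returns default.
            /// </summary>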
            public async Task<(Archive? Archive, TempFile NewFile)> FindUpgrade(Archive a)
            {
                var tmpFile = new TempFile();
                var newArchive = new Archive(this) {Name = a.Name};

                try
                {
                    if (!await Download(newArchive, tmpFile.Path))
                        return default;
                }
                catch (HttpException)
                {
                    return default;
                }

                newArchive.Hash = await tmpFile.Path.FileHashAsync();
                newArchive.Size = tmpFile.Path.Size;

                if (newArchive.Hash == a.Hash || a.Size > 2_500_000_000 || newArchive.Size > 2_500_000_000)
                {
                    return default;
                }

                return (newArchive, tmpFile);
            }
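            /// <summary>
            /// An upgrade is only accepted when the new state points at the same URL; MediaFire
            /// hosts are rejected outright.
            /// </summary>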
            public bool ValidateUpgrade(AbstractDownloadState newArchiveState)
            {
                var httpState = (State)newArchiveState;
                if (new Uri(httpState.Url).Host.EndsWith(".mediafire.com"))
                    return false;

                return httpState.Url == Url;
            }
        }
    }
}