using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Reflection.Emit;
using System.Threading.Tasks;
using System.Web;
using Windows.Networking.BackgroundTransfer;
using Ceras;
using SharpCompress.Common;
using Wabbajack.Common;
using Wabbajack.Lib.Validation;
using File = Alphaleonis.Win32.Filesystem.File;

namespace Wabbajack.Lib.Downloaders
{
    public class HTTPDownloader : IDownloader, IUrlDownloader
    {
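        // Builds a download state from the archive's INI metadata: the URL comes from the
        // [General] directURL key, with optional request headers in directURLHeaders.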
        public async Task<AbstractDownloadState> GetDownloaderState(dynamic archiveINI)
        {
            var url = archiveINI?.General?.directURL;
            return GetDownloaderState(url, archiveINI);
        }

        public AbstractDownloadState GetDownloaderState(string uri)
        {
            return GetDownloaderState(uri, null);
        }

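        // directURLHeaders is a '|'-separated list of "Name: Value" pairs that are later
        // added to each request made for this archive.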
        public AbstractDownloadState GetDownloaderState(string url, dynamic archiveINI)
        {
            if (url != null)
            {
                var tmp = new State
                {
                    Url = url
                };
                if (archiveINI?.General?.directURLHeaders != null)
                {
                    tmp.Headers = new List<string>();
                    tmp.Headers.AddRange(archiveINI?.General.directURLHeaders.Split('|'));
                }
                return tmp;
            }

            return null;
        }

        public async Task Prepare()
        {
        }

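        // Serializable download state for a direct HTTP(S) URL. Ceras persists every member
        // except those marked [Exclude].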
        [MemberConfig(TargetMember.All)]
        public class State : AbstractDownloadState
        {
            public string Url { get; set; }

            public List<string> Headers { get; set; }

            [Exclude]
            public HttpClient Client { get; set; }

            public override object[] PrimaryKey { get => new object[] {Url}; }

            public override bool IsWhitelisted(ServerWhitelist whitelist)
            {
                return whitelist.AllowedPrefixes.Any(p => Url.StartsWith(p));
            }

            public override Task Download(Archive a, string destination)
            {
                return DoDownload(a, destination, true);
            }

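            // With download == false this only issues the request and confirms the response
            // stream can be opened (used by Verify); with download == true the body is
            // streamed to destination, resuming via a Range request if the connection drops
            // and the server advertises Accept-Ranges: bytes.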
            public async Task<bool> DoDownload(Archive a, string destination, bool download)
            {
                if (download)
                {
                    var parent = Directory.GetParent(destination);
                    if (!Directory.Exists(parent.FullName))
                        Directory.CreateDirectory(parent.FullName);
                }

                using (var fs = download ? File.OpenWrite(destination) : null)
                {
                    var client = Client ?? new HttpClient();
                    client.DefaultRequestHeaders.Add("User-Agent", Consts.UserAgent);

                    if (Headers != null)
                        foreach (var header in Headers)
                        {
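                            // Each stored header is a "Name: Value" string; split on the first ':'.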
                            var idx = header.IndexOf(':');
                            var k = header.Substring(0, idx);
                            var v = header.Substring(idx + 1);
                            client.DefaultRequestHeaders.Add(k, v);
                        }

                    long totalRead = 0;
                    var bufferSize = 1024 * 32;

                    Utils.Status($"Starting Download {a?.Name ?? Url}", 0);
                    var response = await client.GetAsync(Url, HttpCompletionOption.ResponseHeadersRead);
                    TOP:

                    if (!response.IsSuccessStatusCode)
                        throw new HttpException((int)response.StatusCode, response.ReasonPhrase);

                    Stream stream;
                    try
                    {
                        stream = await response.Content.ReadAsStreamAsync();
                    }
                    catch (Exception ex)
                    {
                        Utils.Error(ex, $"While downloading {Url}");
                        return false;
                    }

                    if (!download)
                        return true;

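                    // Prefer the server-reported Content-Length; otherwise fall back to the
                    // size recorded for the archive, or 1 when that is unknown.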
                    var headerVar = a.Size == 0 ? "1" : a.Size.ToString();
                    if (response.Content.Headers.Contains("Content-Length"))
                        headerVar = response.Content.Headers.GetValues("Content-Length").FirstOrDefault();

                    var supportsResume = response.Headers.AcceptRanges.FirstOrDefault(f => f == "bytes") != null;

                    var contentSize = headerVar != null ? long.Parse(headerVar) : 1;

                    using (var webs = stream)
                    {
                        var buffer = new byte[bufferSize];
                        int read_this_cycle = 0;

                        while (true)
                        {
                            int read = 0;
                            try
                            {
                                read = await webs.ReadAsync(buffer, 0, bufferSize);
                            }
                            catch (Exception)
                            {
                                if (read_this_cycle == 0)
                                    throw;

                                if (totalRead < contentSize)
                                {
                                    if (supportsResume)
                                    {
                                        Utils.Log(
                                            $"Abort during download, trying to resume {Url} from {totalRead.ToFileSizeString()}");

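                                        // Re-request only the remaining bytes and jump back to the
                                        // response checks above before continuing to read.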
                                        var msg = new HttpRequestMessage(HttpMethod.Get, Url);
                                        msg.Headers.Range = new RangeHeaderValue(totalRead, null);
                                        response = await client.SendAsync(msg,
                                            HttpCompletionOption.ResponseHeadersRead);
                                        goto TOP;
                                    }

                                    throw;
                                }

                                break;
                            }

                            read_this_cycle += read;

                            if (read == 0) break;
                            Utils.Status($"Downloading {a.Name}", (int)(totalRead * 100 / contentSize));

                            fs.Write(buffer, 0, read);
                            totalRead += read;
                        }
                    }

                    return true;
                }
            }

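            // Verification reuses DoDownload with download == false: the request is issued and
            // checked, but nothing is written to disk.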
            public override async Task<bool> Verify()
            {
                return await DoDownload(new Archive {Name = ""}, "", false);
            }

            public override IDownloader GetDownloader()
            {
                return DownloadDispatcher.GetInstance<HTTPDownloader>();
            }

            public override string GetReportEntry(Archive a)
            {
                return $"* [{a.Name} - {Url}]({Url})";
            }
        }
    }
}