2019-01-29 13:46:24 +00:00
|
|
|
import os
|
|
|
|
import pathlib
|
2018-01-25 10:09:20 +00:00
|
|
|
import re
|
|
|
|
import subprocess
|
|
|
|
import tempfile
|
|
|
|
|
2018-01-26 20:50:55 +00:00
|
|
|
from datetime import datetime
|
2018-01-25 10:09:20 +00:00
|
|
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
|
|
from functools import partial
|
|
|
|
|
|
|
|
from twitchdl import twitch
|
2019-01-29 13:46:24 +00:00
|
|
|
from twitchdl.download import download_file
|
2019-02-09 10:52:15 +00:00
|
|
|
from twitchdl.exceptions import ConsoleError
|
2018-01-25 10:09:20 +00:00
|
|
|
from twitchdl.output import print_out
|
2019-01-29 13:46:24 +00:00
|
|
|
from twitchdl.utils import slugify
|
2018-01-25 10:09:20 +00:00
|
|
|
|
|
|
|
|
|
|
|
def read_int(msg, min, max, default):
    """Prompt the user for an integer between `min` and `max` (inclusive).

    An empty answer returns `default`; a non-integer or out-of-range answer
    re-prompts. NB: `min`/`max` shadow the builtins, but callers pass them
    as keyword arguments, so the names are part of the interface.
    """
    msg = msg + " [default {}]: ".format(default)

    while True:
        try:
            val = input(msg)
            if not val:
                return default
            # Parse once (the original called int(val) both in the range
            # check and in the return statement).
            val = int(val)
            if min <= val <= max:
                return val
        except ValueError:
            # Not an integer, ask again.
            pass
|
|
|
|
|
|
|
|
|
2018-01-26 20:50:55 +00:00
|
|
|
def format_size(bytes_):
    """Return a human-readable size: plain bytes below 1 KB, then K/M/G
    with one decimal place."""
    if bytes_ < 1024:
        return str(bytes_)

    value = bytes_ / 1024
    for suffix in ("K", "M"):
        if value < 1024:
            return "{:.1f}{}".format(value, suffix)
        value = value / 1024

    return "{:.1f}G".format(value)
|
|
|
|
|
|
|
|
|
|
|
|
def format_duration(total_seconds):
    """Format a duration in seconds as "H h M min", "M min S sec" or "S sec",
    dropping the leading units that are zero."""
    total_seconds = int(total_seconds)
    hours, remainder = divmod(total_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)

    if hours:
        return "{} h {} min".format(hours, minutes)

    if minutes:
        return "{} min {} sec".format(minutes, seconds)

    return "{} sec".format(seconds)
|
2018-01-25 10:09:20 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _print_video(video):
    """Pretty-print one video entry: id, title, channel, game, date, length."""
    channel_name = video['channel']['display_name']
    duration = format_duration(video['length'])
    # "2019-01-01T12:00:00Z" -> "2019-01-01 @ 12:00:00"
    published = video['published_at'].replace('T', ' @ ').replace('Z', '')

    print_out("\n<bold>{}</bold>".format(video['_id'][1:]))
    print_out("<green>{}</green>".format(video["title"]))
    print_out("<cyan>{}</cyan> playing <cyan>{}</cyan>".format(channel_name, video['game']))
    print_out("Published <cyan>{}</cyan> Length: <cyan>{}</cyan> ".format(published, duration))
|
|
|
|
|
|
|
|
|
|
|
|
def videos(channel_name, **kwargs):
    """List a channel's videos to stdout.

    Extra kwargs are accepted (and ignored) so the CLI can pass its whole
    argument namespace.
    """
    # Local renamed from `videos` to avoid shadowing this function's name.
    data = twitch.get_channel_videos(channel_name)

    print("Found {} videos".format(data["_total"]))

    for video in data['videos']:
        _print_video(video)
|
|
|
|
|
|
|
|
|
2019-01-29 13:46:24 +00:00
|
|
|
def _select_quality(playlists):
    """Interactively ask the user to pick one of the available playlists.

    `playlists` maps an integer choice to a playlist tuple whose first
    element is the quality label; returns the chosen tuple.
    """
    print_out("\nAvailable qualities:")
    for number, playlist in playlists.items():
        print_out("{}) {}".format(number, playlist[0]))

    numbers = list(playlists.keys())
    choice = read_int("Choose quality", min=min(numbers), max=max(numbers), default=numbers[0])

    return playlists[choice]
|
2018-01-25 10:09:20 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _print_progress(futures):
    """Consume download futures as they complete, rendering a single-line
    progress report (count, size, speed, estimated time remaining)."""
    total = len(futures)
    downloaded_size = 0
    started = datetime.now()

    for count, future in enumerate(as_completed(futures), start=1):
        downloaded_size += future.result()
        percentage = 100 * count // total

        # .seconds truncates to whole seconds; guard the first sub-second tick.
        elapsed = (datetime.now() - started).seconds
        speed = downloaded_size // elapsed if elapsed else 0
        # Assume the average pace so far continues for the rest.
        remaining = (total - count) * elapsed / count

        msg = "Downloaded VOD {}/{} ({}%) total <cyan>{}B</cyan> at <cyan>{}B/s</cyan> remaining <cyan>{}</cyan>".format(
            count, total, percentage, format_size(downloaded_size), format_size(speed), format_duration(remaining))

        # \r + ljust redraws the same line in place.
        print_out("\r" + msg.ljust(80), end='')
|
|
|
|
|
|
|
|
|
|
|
|
def _download_files(base_url, directory, filenames, max_workers):
    """Download all VOD chunks concurrently into `directory`.

    Returns the list of local file paths, in the same order as `filenames`.
    """
    paths = []
    tasks = []
    for filename in filenames:
        target = "/".join([directory, filename])
        paths.append(target)
        tasks.append(partial(download_file, base_url.format(filename), target))

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(task) for task in tasks]
        _print_progress(futures)

    return paths
|
2018-01-25 10:09:20 +00:00
|
|
|
|
|
|
|
|
2019-01-29 13:46:24 +00:00
|
|
|
def _join_vods(directory, paths, target):
    """Concatenate downloaded VOD chunks into `target` via ffmpeg's concat
    demuxer; raises CalledProcessError if ffmpeg fails."""
    input_path = "{}/files.txt".format(directory)

    # The concat demuxer reads the chunk list from a text file, one
    # "file <name>" directive per line, relative to the list's directory.
    with open(input_path, 'w') as f:
        f.writelines('file {}\n'.format(os.path.basename(path)) for path in paths)

    command = [
        "ffmpeg",
        "-f", "concat",
        "-i", input_path,
        "-c", "copy",
        target,
        "-stats",
        "-loglevel", "warning",
    ]

    subprocess.run(command).check_returncode()
|
|
|
|
|
|
|
|
|
|
|
|
def _video_target_filename(video, format):
    """Build the output filename: <YYYYMMDD>_<id>_<channel>_<title-slug>.<format>."""
    match = re.search(r"^(\d{4})-(\d{2})-(\d{2})T", video['published_at'])
    year, month, day = match.groups()

    parts = [
        year + month + day,
        video['_id'][1:],             # drop the leading "v" from the id
        video['channel']['name'],
        slugify(video['title']),
    ]

    return "_".join(parts) + "." + format
|
2018-01-25 10:09:20 +00:00
|
|
|
|
|
|
|
|
2019-02-09 10:52:15 +00:00
|
|
|
def parse_video_id(video_id):
    """Resolve `video_id` to an integer video ID.

    Accepts either a plain integer string ("123456") or a twitch.tv video
    URL ("https://www.twitch.tv/videos/123456", optionally with a query
    string; "www." is optional).

    Raises:
        ConsoleError: if the input is neither form.
    """
    # Plain integer ID.
    if re.search(r"^\d+$", video_id):
        return int(video_id)

    # Video URL. The dots are escaped — the original pattern's bare "."
    # matched any character — and "www." is now optional.
    match = re.search(r"^https://(?:www\.)?twitch\.tv/videos/(\d+)(?:\?.+)?$", video_id)
    if match:
        return int(match.group(1))

    raise ConsoleError("Invalid video ID given, expected integer ID or Twitch URL")
|
|
|
|
|
|
|
|
|
2018-01-25 10:09:20 +00:00
|
|
|
def download(video_id, max_workers, format='mkv', **kwargs):
    """Download a complete Twitch VOD and join it into a single file.

    `video_id` may be an integer ID or a twitch.tv video URL. Chunks are
    fetched concurrently with `max_workers` threads into a temp directory,
    concatenated with ffmpeg into a `format` container (default mkv), and
    the chunk files are then deleted. Extra kwargs are accepted (and
    ignored) so the CLI can pass its whole argument namespace.
    """
    video_id = parse_video_id(video_id)

    # Look up video metadata (title, channel, publish date).
    print_out("Looking up video...")
    video = twitch.get_video(video_id)

    print_out("Found: <blue>{}</blue> by <yellow>{}</yellow>".format(
        video['title'], video['channel']['display_name']))

    # The access token authorizes fetching this video's playlists.
    print_out("Fetching access token...")
    access_token = twitch.get_access_token(video_id)

    # Fetch available qualities and let the user pick one interactively.
    print_out("Fetching playlists...")
    playlists = twitch.get_playlists(video_id, access_token)
    quality, playlist_url = _select_quality(playlists)

    # The chosen playlist yields the chunk base URL and chunk filenames.
    print_out("\nFetching playlist...")
    base_url, filenames = twitch.get_playlist_urls(playlist_url)

    # Create a temp dir to store downloads if it doesn't exist
    # (keyed by video id and quality, so reruns can reuse it).
    directory = '{}/twitch-dl/{}/{}'.format(tempfile.gettempdir(), video_id, quality)
    pathlib.Path(directory).mkdir(parents=True, exist_ok=True)
    print_out("Download dir: {}".format(directory))

    print_out("Downloading VODs with {} workers...".format(max_workers))
    paths = _download_files(base_url, directory, filenames, max_workers)

    # Concatenate the chunks into the final target file with ffmpeg.
    print_out("\n\nJoining files...")
    target = _video_target_filename(video, format)
    _join_vods(directory, paths, target)

    # Clean up the downloaded chunks; the joined file is kept.
    print_out("\nDeleting vods...")
    for path in paths:
        os.unlink(path)

    print_out("\nDownloaded: {}".format(target))
|