import asyncio
import httpx
import m3u8
import os
import re
import shutil
import subprocess
import tempfile

from os import path
from pathlib import Path
from typing import List, Optional, OrderedDict
from urllib.parse import urlparse, urlencode

from twitchdl import twitch, utils
from twitchdl.download import download_file
from twitchdl.exceptions import ConsoleError
from twitchdl.http import download_all
from twitchdl.output import print_out


def _parse_playlists(playlists_m3u8):
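    """Parse the playlists m3u8 document and yield (name, description, uri) for each variant playlist."""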
    playlists = m3u8.loads(playlists_m3u8)

    for p in sorted(playlists.playlists, key=lambda p: p.stream_info.resolution is None):
        if p.stream_info.resolution:
            name = p.media[0].name
            description = "x".join(str(r) for r in p.stream_info.resolution)
        else:
            name = p.media[0].group_id
            description = None

        yield name, description, p.uri


def _get_playlist_by_name(playlists, quality):
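    """Return the URI of the playlist matching the given quality name ("source" picks the first playlist)."""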
    if quality == "source":
        _, _, uri = playlists[0]
        return uri

    for name, _, uri in playlists:
        if name == quality:
            return uri

    available = ", ".join([name for (name, _, _) in playlists])
    msg = "Quality '{}' not found. Available qualities are: {}".format(quality, available)
    raise ConsoleError(msg)


def _select_playlist_interactive(playlists):
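    """Print the available qualities and prompt the user to pick one, returning the chosen playlist URI."""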
    print_out("\nAvailable qualities:")
    for n, (name, resolution, uri) in enumerate(playlists):
        if resolution:
            print_out("{}) {} [{}]".format(n + 1, name, resolution))
        else:
            print_out("{}) {}".format(n + 1, name))

    no = utils.read_int("Choose quality", min=1, max=len(playlists), default=1)
    _, _, uri = playlists[no - 1]
    return uri


def _join_vods(playlist_path, target, overwrite, video):
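    """Join the downloaded VODs into a single file using ffmpeg, copying streams and writing video metadata."""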
    command = [
        "ffmpeg",
        "-i", playlist_path,
        "-c", "copy",
        "-metadata", "artist={}".format(video["creator"]["displayName"]),
        "-metadata", "title={}".format(video["title"]),
        "-metadata", "encoded_by=twitch-dl",
        "-stats",
        "-loglevel", "warning",
        "file:{}".format(target),
    ]

    if overwrite:
        command.append("-y")

    print_out("<dim>{}</dim>".format(" ".join(command)))
    result = subprocess.run(command)
    if result.returncode != 0:
        raise ConsoleError("Joining files failed")


def _video_target_filename(video, args):
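    """Render the video output filename from the --output template."""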
    date, time = video["publishedAt"].split("T")
    game = video["game"]["name"] if video["game"] else "Unknown"

    subs = {
        "channel": video["creator"]["displayName"],
        "channel_login": video["creator"]["login"],
        "date": date,
        "datetime": video["publishedAt"],
        "format": args.format,
        "game": game,
        "game_slug": utils.slugify(game),
        "id": video["id"],
        "time": time,
        "title": utils.titlify(video["title"]),
        "title_slug": utils.slugify(video["title"]),
    }

    try:
        return args.output.format(**subs)
    except KeyError as e:
        supported = ", ".join(subs.keys())
        raise ConsoleError("Invalid key {} used in --output. Supported keys are: {}".format(e, supported))


def _clip_target_filename(clip, args):
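    """Render the clip output filename from the --output template."""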
    date, time = clip["createdAt"].split("T")
    game = clip["game"]["name"] if clip["game"] else "Unknown"

    url = clip["videoQualities"][0]["sourceURL"]
    _, ext = path.splitext(url)
    ext = ext.lstrip(".")

    subs = {
        "channel": clip["broadcaster"]["displayName"],
        "channel_login": clip["broadcaster"]["login"],
        "date": date,
        "datetime": clip["createdAt"],
        "format": ext,
        "game": game,
        "game_slug": utils.slugify(game),
        "id": clip["id"],
        "slug": clip["slug"],
        "time": time,
        "title": utils.titlify(clip["title"]),
        "title_slug": utils.slugify(clip["title"]),
    }

    try:
        return args.output.format(**subs)
    except KeyError as e:
        supported = ", ".join(subs.keys())
        raise ConsoleError("Invalid key {} used in --output. Supported keys are: {}".format(e, supported))


def _get_vod_paths(playlist, start: Optional[int], end: Optional[int]) -> List[str]:
    """Extract unique VOD paths for download from playlist."""
    files = []
    vod_start = 0
    for segment in playlist.segments:
        vod_end = vod_start + segment.duration

        # `vod_end > start` is used here because it's better to download a bit
        # more than a bit less, similarly for the end condition
        start_condition = not start or vod_end > start
        end_condition = not end or vod_start < end

        if start_condition and end_condition and segment.uri not in files:
            files.append(segment.uri)

        vod_start = vod_end

    return files


def _create_temp_dir(base_uri: str) -> str:
    """Create a temp dir to store downloads if it doesn't exist."""
    path = urlparse(base_uri).path.lstrip("/")
    temp_dir = Path(tempfile.gettempdir(), "twitch-dl", path)
    temp_dir.mkdir(parents=True, exist_ok=True)
    return str(temp_dir)


def download(args):
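    """Download each video or clip given on the command line."""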
    for video_id in args.videos:
        download_one(video_id, args)


def download_one(video: str, args):
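    """Download a single video or clip, depending on which the given identifier matches."""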
    video_id = utils.parse_video_identifier(video)
    if video_id:
        return _download_video(video_id, args)

    clip_slug = utils.parse_clip_identifier(video)
    if clip_slug:
        return _download_clip(clip_slug, args)

    raise ConsoleError("Invalid input: {}".format(video))


def _get_clip_url(clip, quality):
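    """Return the source URL for the requested clip quality, prompting the user to choose if no quality is given."""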
    qualities = clip["videoQualities"]

    # Quality given as an argument
    if quality:
        if quality == "source":
            return qualities[0]["sourceURL"]

        selected_quality = quality.rstrip("p")  # allow 720p as well as 720
        for q in qualities:
            if q["quality"] == selected_quality:
                return q["sourceURL"]

        available = ", ".join([str(q["quality"]) for q in qualities])
        msg = "Quality '{}' not found. Available qualities are: {}".format(quality, available)
        raise ConsoleError(msg)

    # Ask user to select quality
    print_out("\nAvailable qualities:")
    for n, q in enumerate(qualities):
        print_out("{}) {} [{} fps]".format(n + 1, q["quality"], q["frameRate"]))
    print_out()

    no = utils.read_int("Choose quality", min=1, max=len(qualities), default=1)
    selected_quality = qualities[no - 1]
    return selected_quality["sourceURL"]


def get_clip_authenticated_url(slug, quality):
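    """Fetch an access token for the clip and return its download URL including the auth query string."""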
    print_out("<dim>Fetching access token...</dim>")
    access_token = twitch.get_clip_access_token(slug)

    if not access_token:
        raise ConsoleError("Access token not found for slug '{}'".format(slug))

    url = _get_clip_url(access_token, quality)

    query = urlencode({
        "sig": access_token["playbackAccessToken"]["signature"],
        "token": access_token["playbackAccessToken"]["value"],
    })

    return "{}?{}".format(url, query)


def _download_clip(slug: str, args) -> None:
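    """Look up the clip by slug and download it to the target file."""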
    print_out("<dim>Looking up clip...</dim>")
    clip = twitch.get_clip(slug)

    if not clip:
        raise ConsoleError("Clip '{}' not found".format(slug))

    game = clip["game"]["name"] if clip["game"] else "Unknown"

    print_out("Found: <green>{}</green> by <yellow>{}</yellow>, playing <blue>{}</blue> ({})".format(
        clip["title"],
        clip["broadcaster"]["displayName"],
        game,
        utils.format_duration(clip["durationSeconds"])
    ))

    target = _clip_target_filename(clip, args)
    print_out("Target: <blue>{}</blue>".format(target))

    if not args.overwrite and path.exists(target):
        response = input("File exists. Overwrite? [Y/n]: ")
        if response.lower().strip() not in ["", "y"]:
            raise ConsoleError("Aborted")
        args.overwrite = True

    url = get_clip_authenticated_url(slug, args.quality)
    print_out("<dim>Selected URL: {}</dim>".format(url))

    print_out("<dim>Downloading clip...</dim>")
    download_file(url, target)

    print_out("Downloaded: <blue>{}</blue>".format(target))


def _download_video(video_id, args) -> None:
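    """Download a video by ID: select a playlist, download its VOD segments and join them into the target file."""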
    if args.start and args.end and args.end <= args.start:
        raise ConsoleError("End time must be greater than start time")

    print_out("<dim>Looking up video...</dim>")
    video = twitch.get_video(video_id)

    if not video:
        raise ConsoleError("Video {} not found".format(video_id))

    print_out("Found: <blue>{}</blue> by <yellow>{}</yellow>".format(
        video["title"], video["creator"]["displayName"]))

    target = _video_target_filename(video, args)
    print_out("Output: <blue>{}</blue>".format(target))

    if not args.overwrite and path.exists(target):
        response = input("File exists. Overwrite? [Y/n]: ")
        if response.lower().strip() not in ["", "y"]:
            raise ConsoleError("Aborted")
        args.overwrite = True

    # Chapter select or manual offset
    start, end = _determine_time_range(video_id, args)

    print_out("<dim>Fetching access token...</dim>")
    access_token = twitch.get_access_token(video_id, auth_token=args.auth_token)

    print_out("<dim>Fetching playlists...</dim>")
    playlists_m3u8 = twitch.get_playlists(video_id, access_token)
    playlists = list(_parse_playlists(playlists_m3u8))
    playlist_uri = (_get_playlist_by_name(playlists, args.quality) if args.quality
                    else _select_playlist_interactive(playlists))

    print_out("<dim>Fetching playlist...</dim>")
    response = httpx.get(playlist_uri)
    response.raise_for_status()
    playlist = m3u8.loads(response.text)

    base_uri = re.sub("/[^/]+$", "/", playlist_uri)
    target_dir = _create_temp_dir(base_uri)
    vod_paths = _get_vod_paths(playlist, start, end)

    # Save playlists for debugging purposes
    with open(path.join(target_dir, "playlists.m3u8"), "w") as f:
        f.write(playlists_m3u8)
    with open(path.join(target_dir, "playlist.m3u8"), "w") as f:
        f.write(response.text)

    print_out("\nDownloading {} VODs using {} workers to {}".format(
        len(vod_paths), args.max_workers, target_dir))
    sources = [base_uri + path for path in vod_paths]
    targets = [os.path.join(target_dir, "{:05d}.ts".format(k)) for k, _ in enumerate(vod_paths)]
    asyncio.run(download_all(sources, targets, args.max_workers, rate_limit=args.rate_limit))

    # Make a modified playlist which references downloaded VODs
    # Keep only the downloaded segments and skip the rest
    org_segments = playlist.segments.copy()

    path_map = OrderedDict(zip(vod_paths, targets))
    playlist.segments.clear()
    for segment in org_segments:
        if segment.uri in path_map:
            segment.uri = path_map[segment.uri]
            playlist.segments.append(segment)

    playlist_path = path.join(target_dir, "playlist_downloaded.m3u8")
    playlist.dump(playlist_path)

    if args.no_join:
        print_out("\n\n<dim>Skipping joining files...</dim>")
        print_out("VODs downloaded to:\n<blue>{}</blue>".format(target_dir))
        return

    print_out("\n\nJoining files...")
    _join_vods(playlist_path, target, args.overwrite, video)

    if args.keep:
        print_out("\n<dim>Temporary files not deleted: {}</dim>".format(target_dir))
    else:
        print_out("\n<dim>Deleting temporary files...</dim>")
        shutil.rmtree(target_dir)

    print_out("\nDownloaded: <green>{}</green>".format(target))


def _determine_time_range(video_id, args):
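    """Return the (start, end) offsets in seconds, taken from --start/--end or from the selected chapter."""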
    if args.start or args.end:
        return args.start, args.end

    if args.chapter is not None:
        print_out("<dim>Fetching chapters...</dim>")
        chapters = twitch.get_video_chapters(video_id)

        if not chapters:
            raise ConsoleError("This video has no chapters")

        if args.chapter == 0:
            chapter = _choose_chapter_interactive(chapters)
        else:
            try:
                chapter = chapters[args.chapter - 1]
            except IndexError:
                raise ConsoleError(f"Chapter {args.chapter} does not exist. This video has {len(chapters)} chapters.")

        print_out(f'Selected chapter: <blue>{chapter["description"]}</blue>')
        start = chapter["positionMilliseconds"] // 1000
        duration = chapter["durationMilliseconds"] // 1000
        return start, start + duration

    return None, None


def _choose_chapter_interactive(chapters):
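    """Print the video's chapters and prompt the user to pick one."""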
    print_out("\nChapters:")
    for index, chapter in enumerate(chapters):
        duration = utils.format_time(chapter["durationMilliseconds"] // 1000)
        print_out(f'<b>{index + 1})</b> <green>{chapter["description"]}</green> <dim>({duration})</dim>')

    index = utils.read_int("Select a chapter", 1, len(chapters))
    chapter = chapters[index - 1]
    return chapter