Merge branch 'main' into bugfix/trim-whitespace-from-urls

commit 334dcf71c4
Lincoln Stein, 2023-10-12 22:30:44 -04:00, committed by GitHub
2 changed files with 27 additions and 28 deletions


@@ -20,6 +20,7 @@ from multiprocessing import Process
 from multiprocessing.connection import Connection, Pipe
 from pathlib import Path
 from shutil import get_terminal_size
+from typing import Optional
 import npyscreen
 import torch
@@ -630,21 +631,23 @@ def ask_user_for_prediction_type(model_path: Path, tui_conn: Connection = None)
         return _ask_user_for_pt_cmdline(model_path)
 
 
-def _ask_user_for_pt_cmdline(model_path: Path) -> SchedulerPredictionType:
+def _ask_user_for_pt_cmdline(model_path: Path) -> Optional[SchedulerPredictionType]:
     choices = [SchedulerPredictionType.Epsilon, SchedulerPredictionType.VPrediction, None]
     print(
         f"""
-Please select the type of the V2 checkpoint named {model_path.name}:
-[1] A model based on Stable Diffusion v2 trained on 512 pixel images (SD-2-base)
-[2] A model based on Stable Diffusion v2 trained on 768 pixel images (SD-2-768)
-[3] Skip this model and come back later.
+Please select the scheduler prediction type of the checkpoint named {model_path.name}:
+[1] "epsilon" - most v1.5 models and v2 models trained on 512 pixel images
+[2] "vprediction" - v2 models trained on 768 pixel images and a few v1.5 models
+[3] Accept the best guess; you can fix it in the Web UI later
 """
     )
     choice = None
     ok = False
     while not ok:
         try:
-            choice = input("select> ").strip()
+            choice = input("select [3]> ").strip()
+            if not choice:
+                return None
             choice = choices[int(choice) - 1]
             ok = True
         except (ValueError, IndexError):
@@ -655,7 +658,6 @@ Please select the type of the V2 checkpoint named {model_path.name}:
 def _ask_user_for_pt_tui(model_path: Path, tui_conn: Connection) -> SchedulerPredictionType:
     try:
         tui_conn.send_bytes(f"*need v2 config for:{model_path}".encode("utf-8"))
         # note that we don't do any status checking here
         response = tui_conn.recv_bytes().decode("utf-8")
@@ -665,12 +667,9 @@ def _ask_user_for_pt_tui(model_path: Path, tui_conn: Connection) -> SchedulerPredictionType:
             return SchedulerPredictionType.epsilon
         elif response == "v":
             return SchedulerPredictionType.VPrediction
-        elif response == "abort":
-            logger.info("Conversion aborted")
+        elif response == "guess":
             return None
         else:
             return response
     except Exception:
         return None
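For readability, the new command-line flow from the hunks above can be reassembled into the standalone sketch below. SchedulerPredictionType here is a minimal stand-in enum (its member values are assumptions; the real enum lives elsewhere in the codebase), and the invalid-input message is illustrative. The behavioral change is that pressing Enter at the new "select [3]>" prompt returns None, meaning "accept the best guess", instead of forcing an explicit choice or skipping the model.

from enum import Enum
from typing import Optional


class SchedulerPredictionType(str, Enum):
    # Stand-in for the project's real enum; member values are assumed.
    Epsilon = "epsilon"
    VPrediction = "v_prediction"


def ask_user_for_pt_cmdline(model_name: str) -> Optional[SchedulerPredictionType]:
    """Mirror of the new prompt loop: empty input means 'use the best guess' (None)."""
    choices = [SchedulerPredictionType.Epsilon, SchedulerPredictionType.VPrediction, None]
    print(
        f"""
Please select the scheduler prediction type of the checkpoint named {model_name}:
[1] "epsilon" - most v1.5 models and v2 models trained on 512 pixel images
[2] "vprediction" - v2 models trained on 768 pixel images and a few v1.5 models
[3] Accept the best guess; you can fix it in the Web UI later
"""
    )
    while True:
        raw = input("select [3]> ").strip()
        if not raw:
            return None  # Enter accepts the default: let the installer guess
        try:
            return choices[int(raw) - 1]  # "3" also maps to None via the choices list
        except (ValueError, IndexError):
            print(f"{raw} is not a valid choice")  # illustrative error message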


@@ -381,12 +381,12 @@ def select_stable_diffusion_config_file(
     wrap: bool = True,
     model_name: str = "Unknown",
 ):
-    message = f"Please select the correct base model for the V2 checkpoint named '{model_name}'. Press <CANCEL> to skip installation."
+    message = f"Please select the correct prediction type for the checkpoint named '{model_name}'. Press <CANCEL> to skip installation."
     title = "CONFIG FILE SELECTION"
     options = [
-        "An SD v2.x base model (512 pixels; no 'parameterization:' line in its yaml file)",
-        "An SD v2.x v-predictive model (768 pixels; 'parameterization: \"v\"' line in its yaml file)",
-        "Skip installation for now and come back later",
+        "'epsilon' - most v1.5 models and v2 models trained on 512 pixel images",
+        "'vprediction' - v2 models trained on 768 pixel images and a few v1.5 models)",
+        "Accept the best guess; you can fix it in the Web UI later",
     ]
     F = ConfirmCancelPopup(
@@ -410,7 +410,7 @@ def select_stable_diffusion_config_file(
     choice = F.add(
         npyscreen.SelectOne,
         values=options,
-        value=[0],
+        value=[2],
         max_height=len(options) + 1,
         scroll_exit=True,
     )
@@ -420,5 +420,5 @@ def select_stable_diffusion_config_file(
     if not F.value:
         return None
     assert choice.value[0] in range(0, 3), "invalid choice"
-    choices = ["epsilon", "v", "abort"]
+    choices = ["epsilon", "v", "guess"]
     return choices[choice.value[0]]
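Read together, the two files now agree on a three-way choice: the npyscreen popup pre-selects index 2 ("Accept the best guess") and returns "guess" where it previously returned "abort", and the backend branch in the first file maps "guess" to None. A small sketch of that round trip, assuming plain strings in place of the real enum values:

from typing import Optional

# Index-to-string mapping taken from the diff; index 2 is the new default selection.
CHOICES = ["epsilon", "v", "guess"]


def response_to_prediction_type(response: str) -> Optional[str]:
    # Mirrors the backend branch: "guess" now means "no explicit type, fall back
    # to the installer's best guess" (the old "abort" branch logged and gave up).
    if response == "epsilon":
        return "epsilon"
    elif response == "v":
        return "v_prediction"  # assumed spelling of the v-prediction value
    elif response == "guess":
        return None
    return response


if __name__ == "__main__":
    for index, label in enumerate(CHOICES):
        print(f"selection {index} -> sends {label!r} -> prediction type {response_to_prediction_type(label)!r}")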