mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)
86 lines | 2.4 KiB | Python

import importlib
from inspect import isfunction

import numpy as np
import torch
from PIL import Image, ImageDraw, ImageFont


def log_txt_as_img(wh, xc, size=10):
    """Render a batch of captions as images.

    wh: a tuple of (width, height)
    xc: a list of captions to plot
    """
    b = len(xc)
    txts = list()
    for bi in range(b):
        txt = Image.new("RGB", wh, color="white")
        draw = ImageDraw.Draw(txt)
        font = ImageFont.truetype('data/DejaVuSans.ttf', size=size)
        # Wrap the caption at roughly 40 characters per 256 pixels of width.
        nc = int(40 * (wh[0] / 256))
        lines = "\n".join(xc[bi][start:start + nc] for start in range(0, len(xc[bi]), nc))

        try:
            draw.text((0, 0), lines, fill="black", font=font)
        except UnicodeEncodeError:
            print("Can't encode string for logging. Skipping.")

        # HWC uint8 in [0, 255] -> CHW float in [-1, 1].
        txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0
        txts.append(txt)
    txts = np.stack(txts)
    txts = torch.tensor(txts)
    return txts
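
# Illustrative usage sketch (not part of the original file): render two captions as a
# (2, 3, H, W) tensor in [-1, 1]. Assumes 'data/DejaVuSans.ttf' exists relative to the
# working directory, since the function loads the font from that hard-coded path.
# >>> imgs = log_txt_as_img((256, 256), ["a cat on a sofa", "a mountain at dusk"])
# >>> imgs.shape
# torch.Size([2, 3, 256, 256])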


def ismap(x):
    # A "map" here is a 4D tensor whose channel dimension is larger than 3
    # (e.g. a segmentation map or latent), as opposed to an RGB/greyscale image batch.
    if not isinstance(x, torch.Tensor):
        return False
    return (len(x.shape) == 4) and (x.shape[1] > 3)


def isimage(x):
    # An image batch is a 4D tensor with 1 (greyscale) or 3 (RGB) channels.
    if not isinstance(x, torch.Tensor):
        return False
    return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1)
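
# Illustrative sketch (not part of the original file): ismap/isimage only check tensor
# rank and channel count, so a many-channel batch counts as a "map" while 1- or
# 3-channel batches count as images.
# >>> isimage(torch.zeros(4, 3, 64, 64)), ismap(torch.zeros(4, 8, 32, 32))
# (True, True)
# >>> isimage(torch.zeros(3, 64, 64))  # missing batch dimension
# False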


def exists(x):
    return x is not None


def default(val, d):
    # Return val if it is set; otherwise fall back to d, calling it if it is a
    # function so that expensive defaults are only computed when needed.
    if exists(val):
        return val
    return d() if isfunction(d) else d
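
# Illustrative sketch (not part of the original file): default() keeps any non-None
# value, and evaluates callable fallbacks lazily.
# >>> default(None, 3)
# 3
# >>> default(None, lambda: [1, 2, 3])
# [1, 2, 3]
# >>> default(0, 3)  # 0 is not None, so it is kept
# 0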


def mean_flat(tensor):
    """
    https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/nn.py#L86
    Take the mean over all non-batch dimensions.
    """
    return tensor.mean(dim=list(range(1, len(tensor.shape))))
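
# Illustrative sketch (not part of the original file): reduce a per-pixel loss to one
# scalar per batch element.
# >>> mean_flat(torch.ones(4, 3, 8, 8)).shape
# torch.Size([4])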


def count_params(model, verbose=False):
    total_params = sum(p.numel() for p in model.parameters())
    if verbose:
        print(f"{model.__class__.__name__} has {total_params * 1.e-6:.2f} M params.")
    return total_params
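
# Illustrative sketch (not part of the original file), using a throwaway module:
# >>> import torch.nn as nn
# >>> count_params(nn.Linear(10, 10))  # 10 * 10 weights + 10 biases
# 110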


def instantiate_from_config(config):
    if "target" not in config:
        # Sentinel values used in configs; the caller handles these cases itself.
        if config == '__is_first_stage__':
            return None
        elif config == "__is_unconditional__":
            return None
        raise KeyError("Expected key `target` to instantiate.")
    return get_obj_from_str(config["target"])(**config.get("params", dict()))


def get_obj_from_str(string, reload=False):
    # Resolve a dotted path such as "package.module.ClassName" to the named attribute,
    # optionally reloading the module first.
    module, cls = string.rsplit(".", 1)
    if reload:
        module_imp = importlib.import_module(module)
        importlib.reload(module_imp)
    return getattr(importlib.import_module(module, package=None), cls)
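
# Illustrative sketch (not part of the original file): both helpers resolve dotted
# paths at runtime. The config below is hypothetical and only shows the expected
# {"target": ..., "params": ...} shape that instantiate_from_config consumes.
# >>> get_obj_from_str("torch.nn.Linear")
# <class 'torch.nn.modules.linear.Linear'>
# >>> instantiate_from_config({"target": "torch.nn.Linear",
# ...                          "params": {"in_features": 8, "out_features": 4}})
# Linear(in_features=8, out_features=4, bias=True)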