Mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)
Merge branch 'main' into refactor/model-manager-2
@@ -732,7 +732,9 @@ class ControlNetModel(ModelMixin, ConfigMixin, FromOriginalControlnetMixin):

         controlnet_down_block_res_samples = ()

-        for down_block_res_sample, controlnet_block in zip(down_block_res_samples, self.controlnet_down_blocks):
+        for down_block_res_sample, controlnet_block in zip(
+            down_block_res_samples, self.controlnet_down_blocks, strict=True
+        ):
             down_block_res_sample = controlnet_block(down_block_res_sample)
             controlnet_down_block_res_samples = controlnet_down_block_res_samples + (down_block_res_sample,)

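The only functional change in this hunk is the `strict=True` keyword added to `zip()` (Python 3.10+, PEP 618): if the residuals and the controlnet blocks ever differ in length, the loop now fails loudly instead of silently dropping the extras. A minimal sketch of that behaviour, using made-up lists in place of the real tensors and modules:

# Hypothetical stand-ins for down_block_res_samples / self.controlnet_down_blocks.
samples = ["s0", "s1", "s2"]
blocks = ["b0", "b1"]  # deliberately one element short

# Plain zip() silently truncates to the shorter iterable:
print(list(zip(samples, blocks)))  # [('s0', 'b0'), ('s1', 'b1')]

# zip(..., strict=True) raises instead of hiding the mismatch:
try:
    list(zip(samples, blocks, strict=True))
except ValueError as err:
    print(err)  # e.g. "zip() argument 2 is shorter than argument 1"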
@@ -745,7 +747,9 @@ class ControlNetModel(ModelMixin, ConfigMixin, FromOriginalControlnetMixin):
             scales = torch.logspace(-1, 0, len(down_block_res_samples) + 1, device=sample.device)  # 0.1 to 1.0

             scales = scales * conditioning_scale
-            down_block_res_samples = [sample * scale for sample, scale in zip(down_block_res_samples, scales)]
+            down_block_res_samples = [
+                sample * scale for sample, scale in zip(down_block_res_samples, scales, strict=False)
+            ]
             mid_block_res_sample = mid_block_res_sample * scales[-1]  # last one
         else:
             down_block_res_samples = [sample * conditioning_scale for sample in down_block_res_samples]
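Here the comprehension is reflowed and gets `strict=False` rather than `strict=True`, which looks deliberate: `scales` is built with `len(down_block_res_samples) + 1` entries, the last one reserved for `mid_block_res_sample`, so the two iterables are expected to differ by exactly one. A small sketch of those lengths, assuming three down-block residuals purely for illustration:

import torch

# Assumed count of residuals; the real number depends on the UNet configuration.
down_block_res_samples = [torch.ones(1) for _ in range(3)]

# As in the hunk: one scale per residual plus a trailing scale for the mid block.
scales = torch.logspace(-1, 0, len(down_block_res_samples) + 1)
print([round(s, 3) for s in scales.tolist()])  # [0.1, 0.215, 0.464, 1.0]

# zip stops after the residuals; the leftover scales[-1] is applied to the mid block,
# which is why strict=True would raise here and strict=False is the right call.
scaled = [s * c for s, c in zip(down_block_res_samples, scales, strict=False)]
print(len(scaled), round(scales[-1].item(), 3))  # 3 1.0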
@@ -225,34 +225,34 @@ def basicConfig(**kwargs):


 _FACILITY_MAP = (
-    dict(
-        LOG_KERN=syslog.LOG_KERN,
-        LOG_USER=syslog.LOG_USER,
-        LOG_MAIL=syslog.LOG_MAIL,
-        LOG_DAEMON=syslog.LOG_DAEMON,
-        LOG_AUTH=syslog.LOG_AUTH,
-        LOG_LPR=syslog.LOG_LPR,
-        LOG_NEWS=syslog.LOG_NEWS,
-        LOG_UUCP=syslog.LOG_UUCP,
-        LOG_CRON=syslog.LOG_CRON,
-        LOG_SYSLOG=syslog.LOG_SYSLOG,
-        LOG_LOCAL0=syslog.LOG_LOCAL0,
-        LOG_LOCAL1=syslog.LOG_LOCAL1,
-        LOG_LOCAL2=syslog.LOG_LOCAL2,
-        LOG_LOCAL3=syslog.LOG_LOCAL3,
-        LOG_LOCAL4=syslog.LOG_LOCAL4,
-        LOG_LOCAL5=syslog.LOG_LOCAL5,
-        LOG_LOCAL6=syslog.LOG_LOCAL6,
-        LOG_LOCAL7=syslog.LOG_LOCAL7,
-    )
+    {
+        "LOG_KERN": syslog.LOG_KERN,
+        "LOG_USER": syslog.LOG_USER,
+        "LOG_MAIL": syslog.LOG_MAIL,
+        "LOG_DAEMON": syslog.LOG_DAEMON,
+        "LOG_AUTH": syslog.LOG_AUTH,
+        "LOG_LPR": syslog.LOG_LPR,
+        "LOG_NEWS": syslog.LOG_NEWS,
+        "LOG_UUCP": syslog.LOG_UUCP,
+        "LOG_CRON": syslog.LOG_CRON,
+        "LOG_SYSLOG": syslog.LOG_SYSLOG,
+        "LOG_LOCAL0": syslog.LOG_LOCAL0,
+        "LOG_LOCAL1": syslog.LOG_LOCAL1,
+        "LOG_LOCAL2": syslog.LOG_LOCAL2,
+        "LOG_LOCAL3": syslog.LOG_LOCAL3,
+        "LOG_LOCAL4": syslog.LOG_LOCAL4,
+        "LOG_LOCAL5": syslog.LOG_LOCAL5,
+        "LOG_LOCAL6": syslog.LOG_LOCAL6,
+        "LOG_LOCAL7": syslog.LOG_LOCAL7,
+    }
     if SYSLOG_AVAILABLE
-    else dict()
+    else {}
 )

-_SOCK_MAP = dict(
-    SOCK_STREAM=socket.SOCK_STREAM,
-    SOCK_DGRAM=socket.SOCK_DGRAM,
-)
+_SOCK_MAP = {
+    "SOCK_STREAM": socket.SOCK_STREAM,
+    "SOCK_DGRAM": socket.SOCK_DGRAM,
+}


 class InvokeAIFormatter(logging.Formatter):
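The rest of this file's hunks are the same mechanical cleanup: `dict(...)` and `list()` constructor calls rewritten as literals (`{...}`, `[]`). Both spellings produce identical objects; the literal form is what flake8-comprehensions-style lint rules (e.g. ruff's C408) ask for, which is presumably the motivation, though the commit message doesn't say. A quick equivalence check, using assumed stand-in constants so it runs without the Unix-only syslog module:

# Assumed values standing in for syslog.LOG_KERN / syslog.LOG_USER.
LOG_KERN, LOG_USER = 0, 8

via_constructor = dict(LOG_KERN=LOG_KERN, LOG_USER=LOG_USER)
via_literal = {"LOG_KERN": LOG_KERN, "LOG_USER": LOG_USER}

assert via_constructor == via_literal  # same mapping, different spelling
assert dict() == {} and list() == []   # likewise for the empty collections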
@@ -344,7 +344,7 @@ LOG_FORMATTERS = {


 class InvokeAILogger(object):
-    loggers = dict()
+    loggers = {}

     @classmethod
     def get_logger(
@@ -364,7 +364,7 @@ class InvokeAILogger(object):
     @classmethod
     def get_loggers(cls, config: InvokeAIAppConfig) -> list[logging.Handler]:
         handler_strs = config.log_handlers
-        handlers = list()
+        handlers = []
         for handler in handler_strs:
             handler_name, *args = handler.split("=", 2)
             args = args[0] if len(args) > 0 else None
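The unchanged context around `handlers = []` also shows how `log_handlers` entries are parsed: each string is split at most twice on "=", giving a handler name plus an optional argument. A sketch of just that parsing step, with example handler strings assumed rather than read from a real InvokeAIAppConfig:

# Assumed example values for config.log_handlers.
handler_strs = ["console", "syslog=/dev/log", "file=/var/log/invokeai.log"]

for handler in handler_strs:
    # Same split as in get_loggers(): name on the left, optional argument on the right.
    handler_name, *args = handler.split("=", 2)
    args = args[0] if len(args) > 0 else None
    print(handler_name, args)
# console None
# syslog /dev/log
# file /var/log/invokeai.log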
@@ -398,7 +398,7 @@ class InvokeAILogger(object):
             raise ValueError("syslog is not available on this system")
         if not args:
             args = "/dev/log" if Path("/dev/log").exists() else "address:localhost:514"
-        syslog_args = dict()
+        syslog_args = {}
         try:
             for a in args.split(","):
                 arg_name, *arg_value = a.split(":", 2)
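Context worth noting in this hunk: the syslog handler's argument string is itself a comma-separated list of name:value options (the fallback above is either a socket path such as "/dev/log" or "address:localhost:514"). A sketch of how that second-level split behaves on the fallback value; what the real code then does with each name/value pair lies outside this hunk:

# Sample input: the fallback string from the hunk above.
args = "address:localhost:514"

syslog_args = {}
for a in args.split(","):
    # split(":", 2) keeps at most three parts, so "address:localhost:514"
    # parses as name "address" with value parts ["localhost", "514"].
    arg_name, *arg_value = a.split(":", 2)
    syslog_args[arg_name] = arg_value  # collected here only to show the parse result
print(syslog_args)  # {'address': ['localhost', '514']}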
@@ -434,7 +434,7 @@ class InvokeAILogger(object):
             path = url.path
             port = url.port or 80

-            syslog_args = dict()
+            syslog_args = {}
             for a in arg_list:
                 arg_name, *arg_value = a.split(":", 2)
                 if arg_name == "method":
@@ -29,7 +29,7 @@ def log_txt_as_img(wh, xc, size=10):
     # wh a tuple of (width, height)
     # xc a list of captions to plot
     b = len(xc)
-    txts = list()
+    txts = []
     for bi in range(b):
         txt = Image.new("RGB", wh, color="white")
         draw = ImageDraw.Draw(txt)
@@ -93,7 +93,7 @@ def instantiate_from_config(config, **kwargs):
         elif config == "__is_unconditional__":
             return None
         raise KeyError("Expected key `target` to instantiate.")
-    return get_obj_from_str(config["target"])(**config.get("params", dict()), **kwargs)
+    return get_obj_from_str(config["target"])(**config.get("params", {}), **kwargs)


 def get_obj_from_str(string, reload=False):
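`instantiate_from_config` is the familiar Stable Diffusion-era helper: the config maps a dotted `target` path to a class plus optional `params` kwargs, and the change here is only the `dict()` → `{}` default. A self-contained sketch of the same pattern; the underscore-prefixed helpers below are simplified stand-ins for illustration, not the project's own `get_obj_from_str`:

import importlib

def _get_obj_from_str(string: str):
    # Split "pkg.module.Attr" into module path and attribute name, then import.
    module, cls = string.rsplit(".", 1)
    return getattr(importlib.import_module(module), cls)

def _instantiate_from_config(config, **kwargs):
    # Mirrors the changed line: params defaults to an empty dict literal.
    return _get_obj_from_str(config["target"])(**config.get("params", {}), **kwargs)

# Example with a standard-library target so the sketch has no heavy dependencies.
delta = _instantiate_from_config({"target": "datetime.timedelta", "params": {"days": 2}}, hours=3)
print(delta)  # 2 days, 3:00:00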
@@ -231,11 +231,12 @@ def rand_perlin_2d(shape, res, device, fade=lambda t: 6 * t**5 - 15 * t**4 + 10
     angles = 2 * math.pi * rand_val
     gradients = torch.stack((torch.cos(angles), torch.sin(angles)), dim=-1).to(device)

-    tile_grads = (
-        lambda slice1, slice2: gradients[slice1[0] : slice1[1], slice2[0] : slice2[1]]
-        .repeat_interleave(d[0], 0)
-        .repeat_interleave(d[1], 1)
-    )
+    def tile_grads(slice1, slice2):
+        return (
+            gradients[slice1[0] : slice1[1], slice2[0] : slice2[1]]
+            .repeat_interleave(d[0], 0)
+            .repeat_interleave(d[1], 1)
+        )

     def dot(grad, shift):
         return (
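This last hunk is behaviour-preserving: the lambda assigned to `tile_grads` becomes an ordinary `def` with the same body (the pattern flagged by pycodestyle/ruff as E731), which also gives the function a real name in tracebacks. The equivalence is easy to check on a toy tensor; the `gradients` grid and repeat factors below are made up and much smaller than the Perlin-noise values in the real function:

import torch

# Toy stand-ins for the function's gradients tensor and d = (d0, d1) repeat factors.
gradients = torch.arange(12.0).reshape(3, 4)
d = (2, 2)

# Old form: a lambda bound to a name.
tile_grads_lambda = (
    lambda slice1, slice2: gradients[slice1[0] : slice1[1], slice2[0] : slice2[1]]
    .repeat_interleave(d[0], 0)
    .repeat_interleave(d[1], 1)
)

# New form: a def with the identical body.
def tile_grads(slice1, slice2):
    return (
        gradients[slice1[0] : slice1[1], slice2[0] : slice2[1]]
        .repeat_interleave(d[0], 0)
        .repeat_interleave(d[1], 1)
    )

# Same result for the same slices, e.g. rows 0:2 and columns 1:3.
assert torch.equal(tile_grads_lambda((0, 2), (1, 3)), tile_grads((0, 2), (1, 3)))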