diff --git a/ldm/generate.py b/ldm/generate.py
index 79a543247e..2d60a479cd 100644
--- a/ldm/generate.py
+++ b/ldm/generate.py
@@ -16,6 +16,7 @@ import numpy as np
 import skimage
 import torch
 import transformers
+from PIL import Image, ImageOps
 from diffusers import HeunDiscreteScheduler
 from diffusers.pipeline_utils import DiffusionPipeline
 from diffusers.schedulers.scheduling_ddim import DDIMScheduler
@@ -26,9 +27,9 @@ from diffusers.schedulers.scheduling_ipndm import IPNDMScheduler
 from diffusers.schedulers.scheduling_lms_discrete import LMSDiscreteScheduler
 from diffusers.schedulers.scheduling_pndm import PNDMScheduler
 from omegaconf import OmegaConf
-from PIL import Image, ImageOps
 from pytorch_lightning import seed_everything, logging
 
+import ldm.invoke.conditioning
 from ldm.invoke.args import metadata_from_png
 from ldm.invoke.concepts_lib import Concepts
 from ldm.invoke.conditioning import get_uc_and_c_and_ec
@@ -616,9 +617,9 @@ class Generate:
         # used by multiple postfixers
         # todo: cross-attention control
         uc, c, extra_conditioning_info = get_uc_and_c_and_ec(
-            prompt, model =self.model,
+            prompt, model=self.model,
             skip_normalize_legacy_blend=opt.skip_normalize,
-            log_tokens =ldm.invoke.conditioning.log_tokenization
+            log_tokens=ldm.invoke.conditioning.log_tokenization
         )
 
         if tool in ('gfpgan','codeformer','upscale'):