From 9f088d1bf5bff0d53a584c1580d43a882f8cfe24 Mon Sep 17 00:00:00 2001
From: Sergey Borisov
Date: Tue, 16 Jul 2024 00:51:25 +0300
Subject: [PATCH] Multiple small fixes

---
 invokeai/app/invocations/denoise_latents.py           |  7 ++++---
 .../backend/stable_diffusion/diffusers_pipeline.py    | 11 +----------
 .../backend/stable_diffusion/extensions/__init__.py   | 12 ------------
 3 files changed, 5 insertions(+), 25 deletions(-)
 delete mode 100644 invokeai/backend/stable_diffusion/extensions/__init__.py

diff --git a/invokeai/app/invocations/denoise_latents.py b/invokeai/app/invocations/denoise_latents.py
index c0a74756cb..7563c30223 100644
--- a/invokeai/app/invocations/denoise_latents.py
+++ b/invokeai/app/invocations/denoise_latents.py
@@ -57,7 +57,7 @@ from invokeai.backend.stable_diffusion.diffusion.conditioning_data import (
 )
 from invokeai.backend.stable_diffusion.diffusion.custom_atttention import CustomAttnProcessor2_0
 from invokeai.backend.stable_diffusion.diffusion_backend import StableDiffusionBackend
-from invokeai.backend.stable_diffusion.extensions import PreviewExt
+from invokeai.backend.stable_diffusion.extensions.preview import PreviewExt
 from invokeai.backend.stable_diffusion.extensions_manager import ExtensionsManager
 from invokeai.backend.stable_diffusion.schedulers import SCHEDULER_MAP
 from invokeai.backend.stable_diffusion.schedulers.schedulers import SCHEDULER_NAME_VALUES
@@ -723,7 +723,8 @@ class DenoiseLatentsInvocation(BaseInvocation):
     @torch.no_grad()
     @SilenceWarnings()  # This quenches the NSFW nag from diffusers.
     def _new_invoke(self, context: InvocationContext) -> LatentsOutput:
-        with ExitStack() as exit_stack:
+        # TODO: remove suppression when extensions which use models are added
+        with ExitStack() as exit_stack:  # noqa: F841
             ext_manager = ExtensionsManager()
 
             device = TorchDevice.choose_torch_device()
@@ -804,7 +805,7 @@ class DenoiseLatentsInvocation(BaseInvocation):
             result_latents = sd_backend.latents_from_embeddings(denoise_ctx, ext_manager)
 
             # https://discuss.huggingface.co/t/memory-usage-by-later-pipeline-stages/23699
-            result_latents = result_latents.to("cpu")  # TODO: detach?
+            result_latents = result_latents.detach().to("cpu")
             TorchDevice.empty_cache()
 
             name = context.tensors.save(tensor=result_latents)
diff --git a/invokeai/backend/stable_diffusion/diffusers_pipeline.py b/invokeai/backend/stable_diffusion/diffusers_pipeline.py
index 216e4d3bd1..b3a668518b 100644
--- a/invokeai/backend/stable_diffusion/diffusers_pipeline.py
+++ b/invokeai/backend/stable_diffusion/diffusers_pipeline.py
@@ -23,20 +23,11 @@ from invokeai.app.services.config.config_default import get_config
 from invokeai.backend.stable_diffusion.diffusion.conditioning_data import IPAdapterData, TextConditioningData
 from invokeai.backend.stable_diffusion.diffusion.shared_invokeai_diffusion import InvokeAIDiffuserComponent
 from invokeai.backend.stable_diffusion.diffusion.unet_attention_patcher import UNetAttentionPatcher, UNetIPAdapterData
-from invokeai.backend.stable_diffusion.extensions import PipelineIntermediateState
+from invokeai.backend.stable_diffusion.extensions.preview import PipelineIntermediateState
 from invokeai.backend.util.attention import auto_detect_slice_size
 from invokeai.backend.util.devices import TorchDevice
 from invokeai.backend.util.hotfixes import ControlNetModel
 
-# @dataclass
-# class PipelineIntermediateState:
-#     step: int
-#     order: int
-#     total_steps: int
-#     timestep: int
-#     latents: torch.Tensor
-#     predicted_original: Optional[torch.Tensor] = None
-
 
 @dataclass
 class AddsMaskGuidance:
diff --git a/invokeai/backend/stable_diffusion/extensions/__init__.py b/invokeai/backend/stable_diffusion/extensions/__init__.py
deleted file mode 100644
index faf0a1e1ec..0000000000
--- a/invokeai/backend/stable_diffusion/extensions/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-"""
-Initialization file for the invokeai.backend.stable_diffusion.extensions package
-"""
-
-from invokeai.backend.stable_diffusion.extensions.base import ExtensionBase
-from invokeai.backend.stable_diffusion.extensions.preview import PipelineIntermediateState, PreviewExt
-
-__all__ = [
-    "ExtensionBase",
-    "PipelineIntermediateState",
-    "PreviewExt",
-]