Mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)

Commit: e88d7c242f ("isort wip 3")
Parent: caea6d11c6
@@ -1,15 +1,19 @@
 """
 Initialization file for invokeai.backend.model_management
 """
-from .model_manager import ModelManager, ModelInfo, AddModelResult, SchedulerPredictionType # noqa: F401
-from .model_cache import ModelCache # noqa: F401
+# This import must be first
+from .model_manager import ModelManager, ModelInfo, AddModelResult, SchedulerPredictionType # noqa: F401 isort: split
+
 from .lora import ModelPatcher, ONNXModelPatcher # noqa: F401
+from .model_cache import ModelCache # noqa: F401
 from .models import ( # noqa: F401
     BaseModelType,
-    ModelType,
-    SubModelType,
-    ModelVariantType,
-    ModelNotFoundException,
     DuplicateModelException,
+    ModelNotFoundException,
+    ModelType,
+    ModelVariantType,
+    SubModelType,
 )
-from .model_merge import ModelMerger, MergeInterpolationMethod # noqa: F401
+
+# This import must be last
+from .model_merge import ModelMerger, MergeInterpolationMethod # noqa: F401 isort: split
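The "This import must be first" / "This import must be last" comments work together with the isort: split action comments appended to those import lines: isort sorts each side of a split independently and never moves an import across it, so the required ordering survives later isort runs. Below is a minimal sketch of the same mechanism using only stdlib modules (not code from this commit); it uses the standalone form of the action comment, while the commit attaches it to the import line itself.

# Sketch only: the "isort: split" action comment ends the current import
# section, so isort sorts the two halves independently and "sys" stays
# first on every run, even though it would otherwise sort after "json".
import sys  # noqa: F401

# isort: split

import json  # noqa: F401
import os  # noqa: F401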
@@ -25,12 +25,7 @@ from typing import Optional, Union

 import requests
 import torch
-from diffusers.models import (
-    AutoencoderKL,
-    ControlNetModel,
-    PriorTransformer,
-    UNet2DConditionModel,
-)
+from diffusers.models import AutoencoderKL, ControlNetModel, PriorTransformer, UNet2DConditionModel
 from diffusers.pipelines.latent_diffusion.pipeline_latent_diffusion import LDMBertConfig, LDMBertModel
 from diffusers.pipelines.paint_by_example import PaintByExampleImageEncoder
 from diffusers.pipelines.pipeline_utils import DiffusionPipeline
@@ -64,6 +59,7 @@ from transformers import (

 from invokeai.app.services.config import InvokeAIAppConfig
 from invokeai.backend.util.logging import InvokeAILogger
+
 from .models import BaseModelType, ModelVariantType

 try:
@@ -1203,8 +1199,8 @@ def download_from_original_stable_diffusion_ckpt(
         StableDiffusionControlNetPipeline,
         StableDiffusionInpaintPipeline,
         StableDiffusionPipeline,
-        StableDiffusionXLPipeline,
         StableDiffusionXLImg2ImgPipeline,
+        StableDiffusionXLPipeline,
         StableUnCLIPImg2ImgPipeline,
         StableUnCLIPPipeline,
     )
@@ -2,8 +2,8 @@ from __future__ import annotations

 import copy
 from contextlib import contextmanager
-from typing import Optional, Dict, Tuple, Any, Union, List
 from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple, Union

 import numpy as np
 import torch
@@ -14,7 +14,6 @@ from transformers import CLIPTextModel, CLIPTokenizer

 from .models.lora import LoRAModel

-
 """
 loras = [
     (lora_model1, 0.7),
@@ -307,9 +306,10 @@ class TextualInversionManager(BaseTextualInversionManager):


 class ONNXModelPatcher:
-    from .models.base import IAIOnnxRuntimeModel
     from diffusers import OnnxRuntimeModel
+
+    from .models.base import IAIOnnxRuntimeModel

     @classmethod
     @contextmanager
     def apply_lora_unet(
@@ -17,18 +17,19 @@ context. Use like this:
 """

 import gc
+import hashlib
 import os
 import sys
-import hashlib
 from contextlib import suppress
 from dataclasses import dataclass, field
 from pathlib import Path
-from typing import Dict, Union, types, Optional, Type, Any
+from typing import Any, Dict, Optional, Type, Union, types

 import torch

 import invokeai.backend.util.logging as logger
-from .models import BaseModelType, ModelType, SubModelType, ModelBase
+
+from .models import BaseModelType, ModelBase, ModelType, SubModelType

 # Maximum size of the cache, in gigs
 # Default is roughly enough to hold three fp16 diffusers models in RAM simultaneously
@@ -234,8 +234,8 @@ import textwrap
 import types
 from dataclasses import dataclass
 from pathlib import Path
-from shutil import rmtree, move
-from typing import Optional, List, Literal, Tuple, Union, Dict, Set, Callable
+from shutil import move, rmtree
+from typing import Callable, Dict, List, Literal, Optional, Set, Tuple, Union

 import torch
 import yaml
@@ -246,20 +246,21 @@ from pydantic import BaseModel, Field
 import invokeai.backend.util.logging as logger
 from invokeai.app.services.config import InvokeAIAppConfig
 from invokeai.backend.util import CUDA_DEVICE, Chdir
+
 from .model_cache import ModelCache, ModelLocker
 from .model_search import ModelSearch
 from .models import (
-    BaseModelType,
-    ModelType,
-    SubModelType,
-    ModelError,
-    SchedulerPredictionType,
     MODEL_CLASSES,
-    ModelConfigBase,
-    ModelNotFoundException,
-    InvalidModelException,
+    BaseModelType,
     DuplicateModelException,
+    InvalidModelException,
     ModelBase,
+    ModelConfigBase,
+    ModelError,
+    ModelNotFoundException,
+    ModelType,
+    SchedulerPredictionType,
+    SubModelType,
 )

 # We are only starting to number the config file with release 3.
@@ -9,13 +9,14 @@ Copyright (c) 2023 Lincoln Stein and the InvokeAI Development Team
 import warnings
 from enum import Enum
 from pathlib import Path
+from typing import List, Optional, Union

 from diffusers import DiffusionPipeline
 from diffusers import logging as dlogging
-from typing import List, Union, Optional

 import invokeai.backend.util.logging as logger

-from ...backend.model_management import ModelManager, ModelType, BaseModelType, ModelVariantType, AddModelResult
+from ...backend.model_management import AddModelResult, BaseModelType, ModelManager, ModelType, ModelVariantType
+

 class MergeInterpolationMethod(str, Enum):
@@ -1,24 +1,23 @@
 import json
-import torch
-import safetensors.torch
-
 from dataclasses import dataclass
-
-from diffusers import ModelMixin, ConfigMixin
 from pathlib import Path
-from typing import Callable, Literal, Union, Dict, Optional
+from typing import Callable, Dict, Literal, Optional, Union

+import safetensors.torch
+import torch
+from diffusers import ConfigMixin, ModelMixin
 from picklescan.scanner import scan_file_path

 from .models import (
     BaseModelType,
+    InvalidModelException,
     ModelType,
     ModelVariantType,
     SchedulerPredictionType,
     SilenceWarnings,
-    InvalidModelException,
 )
-from .util import lora_token_vector_length
 from .models.base import read_checkpoint_meta
+from .util import lora_token_vector_length
+

 @dataclass
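The blank lines added and removed throughout these hunks come from isort's section grouping (stdlib, third-party, first-party, then local relative imports, each separated by a blank line), and the reshuffled names come from its per-section sort. One way to reproduce or check this kind of result is isort's Python API; the snippet below is a sketch only, since the exact grouping depends on the project's isort settings (for example, which packages are treated as first-party), which are not shown in this diff.

# Run isort programmatically on a small sample modeled on the old imports
# from the hunk above; the output regroups them into sections and sorts
# each section. Requires the isort package to be installed.
import isort

messy = (
    "import torch\n"
    "import safetensors.torch\n"
    "from dataclasses import dataclass\n"
    "from typing import Callable, Literal, Union, Dict, Optional\n"
)

print(isort.code(messy))
# Expected shape of the output (exact result depends on configuration):
# stdlib imports first (dataclasses, typing), a blank line, then the
# third-party imports (safetensors.torch, torch), each sorted alphabetically.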
@@ -5,8 +5,8 @@ Abstract base class for recursive directory search for models.

 import os
 from abc import ABC, abstractmethod
-from typing import List, Set, types
 from pathlib import Path
+from typing import List, Set, types

 import invokeai.backend.util.logging as logger
