# InvokeAI/invokeai/backend/model_management/models/lora.py

import os

import torch

from enum import Enum
from typing import Optional, Union, Literal

from .base import (
    ModelBase,
    ModelConfigBase,
    BaseModelType,
    ModelType,
    SubModelType,
    classproperty,
)

# TODO: naming
from ..lora import LoRAModel as LoRAModelRaw
class LoRAModelFormat(str, Enum):
    """On-disk storage format of a LoRA model.

    Detected by LoRAModel.detect_format(): a directory is treated as
    Diffusers layout, a single file as a LyCORIS/kohya checkpoint.
    """

    LyCORIS = "lycoris"      # single checkpoint file
    Diffusers = "diffusers"  # directory layout
class LoRAModel(ModelBase):
    """Model-manager wrapper for a LoRA model stored on disk.

    Exposes size accounting, loading (via LoRAModelRaw), format detection,
    and the conversion hook used by the model manager. A LoRA has no
    sub-models, so every child_type argument must be None.
    """

    # Size in bytes: the on-disk file size until the model is loaded,
    # then the in-memory size reported by calc_size().
    model_size: int

    class Config(ModelConfigBase):
        model_format: LoRAModelFormat  # TODO:

    def __init__(self, model_path: str, base_model: BaseModelType, model_type: ModelType):
        assert model_type == ModelType.Lora
        super().__init__(model_path, base_model, model_type)

        # Initial estimate from the file on disk; refined in get_model()
        # once the checkpoint has actually been loaded.
        # NOTE(review): for a Diffusers-format LoRA (a directory),
        # getsize() returns the directory-entry size, not the content
        # size — TODO confirm whether directory LoRAs ever reach here.
        self.model_size = os.path.getsize(self.model_path)

    def get_size(self, child_type: Optional[SubModelType] = None) -> int:
        """Return the model size in bytes.

        Raises:
            ValueError: if child_type is given — LoRAs have no sub-models.
        """
        if child_type is not None:
            raise ValueError("There are no child models in lora")
        return self.model_size

    def get_model(
        self,
        torch_dtype: Optional[torch.dtype],
        child_type: Optional[SubModelType] = None,
    ):
        """Load the LoRA checkpoint from disk and return the raw model.

        Args:
            torch_dtype: dtype to load the weights in (None for default).
            child_type: must be None — LoRAs have no sub-models.

        Raises:
            ValueError: if child_type is given.
        """
        if child_type is not None:
            raise ValueError("There are no child models in lora")

        model = LoRAModelRaw.from_checkpoint(
            file_path=self.model_path,
            dtype=torch_dtype,
        )

        # Replace the on-disk size estimate with the true in-memory size.
        self.model_size = model.calc_size()
        return model

    @classproperty
    def save_to_config(cls) -> bool:
        # LoRA configs are fully derived from disk state; nothing to persist.
        return False

    @classmethod
    def detect_format(cls, path: str) -> LoRAModelFormat:
        """Classify a path: directory → Diffusers layout, file → LyCORIS."""
        if os.path.isdir(path):
            return LoRAModelFormat.Diffusers
        else:
            return LoRAModelFormat.LyCORIS

    @classmethod
    def convert_if_required(
        cls,
        model_path: str,
        output_path: str,
        config: ModelConfigBase,
        base_model: BaseModelType,
    ) -> str:
        """Return a path usable by the loader, converting if needed.

        Raises:
            NotImplementedError: for Diffusers-format LoRAs (unsupported).
        """
        if cls.detect_format(model_path) == LoRAModelFormat.Diffusers:
            # TODO: add diffusers lora when it stabilizes a bit
            raise NotImplementedError("Diffusers lora not supported")
        else:
            return model_path