remove redundant import

Lincoln Stein 2023-07-06 13:24:58 -04:00
parent 581be42c75
commit e573a533ae
2 changed files with 4 additions and 8 deletions

@@ -197,10 +197,10 @@ class ModelInstall(object):
             # folders style or similar
             elif path.is_dir() and any([(path/x).exists() for x in \
-                                        {'config.json','model_index.json','learned_embeds.bin','pytorch_lora_weights.bin'}
-                                        ]
-                                        ):
-                models_installed.update(self._install_path(path))
+                {'config.json','model_index.json','learned_embeds.bin','pytorch_lora_weights.bin'}
+                ]
+                ):
+                models_installed.update(self._install_path(path))
             # recursive scan
             elif path.is_dir():
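The hunk above appears to be a whitespace-only re-indent: the removed and added lines read identically once leading space is stripped. For context, the folder-style branch treats a directory as an installable model when it contains any of four marker files. A self-contained sketch of that check, using the marker set copied from the hunk (is_model_folder is a hypothetical name, not from the codebase):

from pathlib import Path

# Marker files that identify a directory as a model (set copied from the hunk):
# a diffusers model config, a diffusers pipeline index, a textual-inversion
# embedding, or LoRA weights.
MODEL_MARKERS = {'config.json', 'model_index.json',
                 'learned_embeds.bin', 'pytorch_lora_weights.bin'}

def is_model_folder(path: Path) -> bool:
    # Hypothetical helper mirroring the elif condition above.
    return path.is_dir() and any((path / name).exists() for name in MODEL_MARKERS)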
@@ -223,7 +223,6 @@ class ModelInstall(object):
     # install a model from a local path. The optional info parameter is there to prevent
     # the model from being probed twice in the event that it has already been probed.
     def _install_path(self, path: Path, info: ModelProbeInfo=None)->AddModelResult:
-        model_result = None
         info = info or ModelProbe().heuristic_probe(path,self.prediction_helper)
         if not info:
             logger.warning(f'Unable to parse format of {path}')
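The dropped model_result = None was an unused initializer. The line below it is the probe-once idiom the comment above the method describes: a caller that has already probed the model passes its ModelProbeInfo in, and the heuristic probe runs only when none was given. A minimal sketch of the pattern, with hypothetical stand-ins (ProbeInfo, expensive_probe) for the real classes:

from typing import Optional

class ProbeInfo:
    # Hypothetical stand-in for ModelProbeInfo.
    pass

def expensive_probe(path: str) -> ProbeInfo:
    # Hypothetical stand-in for ModelProbe().heuristic_probe(...).
    return ProbeInfo()

def install_path(path: str, info: Optional[ProbeInfo] = None) -> ProbeInfo:
    # Reuse the caller's probe result when provided; otherwise probe once here.
    info = info or expensive_probe(path)
    return info

Note that "or" falls through on any falsy value; "if info is None" is the stricter spelling when a falsy-but-valid result is possible.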

@@ -4,15 +4,12 @@ import copy
 from contextlib import contextmanager
-from typing import Optional, Dict, Tuple, Any, Union, List
 from pathlib import Path
 from typing import Any, Dict, Optional, Tuple, Union, List

 import torch
 from compel.embeddings_provider import BaseTextualInversionManager
 from diffusers.models import UNet2DConditionModel
 from safetensors.torch import load_file
-from diffusers.models import UNet2DConditionModel
 from transformers import CLIPTextModel, CLIPTokenizer
-from torch.utils.hooks import RemovableHandle

 class LoRALayerBase:
     #rank: Optional[int]
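Redundant imports like the two duplicates removed above are mechanical to find: pyflakes-based linters flag a re-imported name as F811 and an unused import such as RemovableHandle as F401. The same check can be sketched with the standard library alone (duplicate_imports is a hypothetical helper written for illustration):

import ast
from collections import Counter

def duplicate_imports(source: str) -> list[str]:
    # Collect every name bound by an import statement, then report repeats.
    names = []
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, (ast.Import, ast.ImportFrom)):
            names += [alias.asname or alias.name for alias in node.names]
    return [name for name, count in Counter(names).items() if count > 1]

sample = (
    "from typing import Optional, Dict, Tuple, Any, Union, List\n"
    "from typing import Any, Dict, Optional, Tuple, Union, List\n"
    "from diffusers.models import UNet2DConditionModel\n"
    "from diffusers.models import UNet2DConditionModel\n"
)
print(duplicate_imports(sample))
# ['Optional', 'Dict', 'Tuple', 'Any', 'Union', 'List', 'UNet2DConditionModel']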