remove redundant import

This commit is contained in:
Lincoln Stein 2023-07-06 13:24:58 -04:00
parent 581be42c75
commit e573a533ae
2 changed files with 4 additions and 8 deletions

View File

@@ -197,10 +197,10 @@ class ModelInstall(object):
             # folders style or similar
             elif path.is_dir() and any([(path/x).exists() for x in \
                         {'config.json','model_index.json','learned_embeds.bin','pytorch_lora_weights.bin'}
                         ]
                         ):
                 models_installed.update(self._install_path(path))
             # recursive scan
             elif path.is_dir():
@@ -223,7 +223,6 @@ class ModelInstall(object):
     # install a model from a local path. The optional info parameter is there to prevent
     # the model from being probed twice in the event that it has already been probed.
     def _install_path(self, path: Path, info: ModelProbeInfo=None)->AddModelResult:
-        model_result = None
         info = info or ModelProbe().heuristic_probe(path,self.prediction_helper)
         if not info:
             logger.warning(f'Unable to parse format of {path}')

View File

@@ -4,15 +4,12 @@ import copy
 from contextlib import contextmanager
 from typing import Optional, Dict, Tuple, Any, Union, List
 from pathlib import Path
-from typing import Any, Dict, Optional, Tuple, Union, List
 import torch
 from compel.embeddings_provider import BaseTextualInversionManager
 from diffusers.models import UNet2DConditionModel
 from safetensors.torch import load_file
-from diffusers.models import UNet2DConditionModel
 from transformers import CLIPTextModel, CLIPTokenizer
-from torch.utils.hooks import RemovableHandle

 class LoRALayerBase:
     #rank: Optional[int]