Add RAM cache module and support files

This commit is contained in:
Lincoln Stein
2024-01-31 23:37:59 -05:00
committed by psychedelicious
parent a1307b9f2e
commit 5c2884569e
14 changed files with 1334 additions and 6 deletions

View File

@@ -18,9 +18,9 @@ from .config import (
InvalidModelConfigException,
ModelConfigFactory,
ModelFormat,
ModelRepoVariant,
ModelType,
ModelVariantType,
ModelRepoVariant,
SchedulerPredictionType,
)
from .hash import FastModelHash
@@ -483,8 +483,8 @@ class FolderProbeBase(ProbeBase):
def get_repo_variant(self) -> ModelRepoVariant:
# get all files ending in .bin or .safetensors
weight_files = list(self.model_path.glob('**/*.safetensors'))
weight_files.extend(list(self.model_path.glob('**/*.bin')))
weight_files = list(self.model_path.glob("**/*.safetensors"))
weight_files.extend(list(self.model_path.glob("**/*.bin")))
for x in weight_files:
if ".fp16" in x.suffixes:
return ModelRepoVariant.FP16
@@ -496,6 +496,7 @@ class FolderProbeBase(ProbeBase):
return ModelRepoVariant.ONNX
return ModelRepoVariant.DEFAULT
class PipelineFolderProbe(FolderProbeBase):
def get_base_type(self) -> BaseModelType:
with open(self.model_path / "unet" / "config.json", "r") as file:
@@ -540,7 +541,6 @@ class PipelineFolderProbe(FolderProbeBase):
except Exception:
pass
return ModelVariantType.Normal
class VaeFolderProbe(FolderProbeBase):