fix merge conflicts; tested and working

Lincoln Stein 2024-07-03 23:08:29 -04:00
parent b20b3006a3
commit 58708c5d2d
2 changed files with 8 additions and 5 deletions


@@ -3,6 +3,7 @@
import io
import pathlib
import shutil
import traceback
from copy import deepcopy
from enum import Enum
@@ -863,19 +864,20 @@ async def set_cache_size(
        # Try to apply the target state.
        cache.max_vram_cache_size = vram_new
        cache.max_cache_size = ram_new
        app_config.max_cache_size = ram_new
        app_config.max_vram_cache_size = vram_new
        app_config.ram = ram_new
        app_config.vram = vram_new
        if persist:
            app_config.write_file(new_config_path)
            shutil.move(new_config_path, config_path)
    except Exception as e:
        # If there was a failure, restore the initial state.
        cache.max_vram_cache_size = vram_old
        cache.max_cache_size = ram_old
        app_config.max_cache_size = ram_old
        app_config.max_vram_cache_size = vram_old
        cache.max_vram_cache_size = vram_old
        app_config.ram = ram_old
        app_config.vram = vram_old
        raise RuntimeError("Failed to update cache size") from e
    return value
@model_manager_router.get(
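Note on the hunk above: it applies the new RAM/VRAM limits to both the live model cache and the app config, persists them by writing a temporary config file and moving it over the real one, and restores every value if anything fails. A minimal standalone sketch of that apply-then-rollback pattern follows; SimpleCache, SimpleConfig, and update_cache_size are hypothetical stand-ins for illustration, not InvokeAI's actual classes or API.

# Sketch only: SimpleCache/SimpleConfig are hypothetical stand-ins, not InvokeAI types.
import shutil
from dataclasses import dataclass
from pathlib import Path

@dataclass
class SimpleCache:
    max_cache_size: float       # RAM limit in GB
    max_vram_cache_size: float  # VRAM limit in GB

@dataclass
class SimpleConfig:
    ram: float
    vram: float

    def write_file(self, path: Path) -> None:
        path.write_text(f"ram: {self.ram}\nvram: {self.vram}\n")

def update_cache_size(cache: SimpleCache, app_config: SimpleConfig, ram_new: float,
                      vram_new: float, config_path: Path, persist: bool) -> None:
    # Remember the initial state so it can be restored if anything below fails.
    ram_old, vram_old = cache.max_cache_size, cache.max_vram_cache_size
    new_config_path = config_path.with_suffix(".new")
    try:
        # Apply the target state to the live cache and the in-memory config.
        cache.max_vram_cache_size = vram_new
        cache.max_cache_size = ram_new
        app_config.ram = ram_new
        app_config.vram = vram_new
        if persist:
            # Write to a temporary file first, then move it over the real config.
            app_config.write_file(new_config_path)
            shutil.move(new_config_path, config_path)
    except Exception as e:
        # Roll back both the cache and the config to the initial state.
        cache.max_vram_cache_size = vram_old
        cache.max_cache_size = ram_old
        app_config.ram = ram_old
        app_config.vram = vram_old
        raise RuntimeError("Failed to update cache size") from e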


@@ -66,6 +66,7 @@ class ModelLoader(ModelLoaderBase):
        return (model_base / config.path).resolve()

    def _load_and_cache(self, config: AnyModelConfig, submodel_type: Optional[SubModelType] = None) -> ModelLockerBase:
        stats_name = ":".join([config.base, config.type, config.name, (submodel_type or "")])
        try:
            return self._ram_cache.get(config.key, submodel_type, stats_name=stats_name)
        except IndexError:
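The second file's hunk builds a stats_name key from the model's base, type, and name, then asks the RAM cache for the model and treats IndexError as the cache-miss signal (the load-and-insert path continues below the lines shown). A rough sketch of that get-or-load shape follows; DictRamCache and load_and_cache are hypothetical simplifications, not the real model cache interface.

# Sketch only: DictRamCache/load_and_cache are hypothetical simplifications.
from typing import Any, Callable, Dict, Optional

class DictRamCache:
    def __init__(self) -> None:
        self._models: Dict[str, Any] = {}

    def get(self, key: str, stats_name: Optional[str] = None) -> Any:
        # Raise IndexError on a miss, mirroring the behaviour the hunk above relies on.
        if key not in self._models:
            raise IndexError(f"{key} is not in the cache")
        return self._models[key]

    def put(self, key: str, model: Any) -> None:
        self._models[key] = model

def load_and_cache(cache: DictRamCache, key: str, stats_name: str,
                   load_from_disk: Callable[[], Any]) -> Any:
    try:
        # Fast path: the model is already resident in the RAM cache.
        return cache.get(key, stats_name=stats_name)
    except IndexError:
        # Cache miss: load the model, insert it into the cache, and return it.
        model = load_from_disk()
        cache.put(key, model)
        return model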