# Copyright (c) 2023 Kyle Schouviller (https://github.com/kyle0654)

from abc import ABC, abstractmethod
from pathlib import Path
from queue import Queue
from typing import Dict, Optional, Union

import torch


class LatentsStorageBase(ABC):
    """Responsible for storing and retrieving latents."""

    @abstractmethod
    def get(self, name: str) -> torch.Tensor:
        """Retrieve the latents tensor stored under `name`."""
        pass

    @abstractmethod
    def save(self, name: str, data: torch.Tensor) -> None:
        """Store the latents tensor `data` under `name`."""
        pass

    @abstractmethod
    def delete(self, name: str) -> None:
        """Remove the latents tensor stored under `name`."""
        pass


class ForwardCacheLatentsStorage(LatentsStorageBase):
    """Caches the latest N latents in memory, writing through to and reading from underlying storage."""

    __cache: Dict[str, torch.Tensor]
    __cache_ids: Queue
    __max_cache_size: int
    __underlying_storage: LatentsStorageBase

    def __init__(self, underlying_storage: LatentsStorageBase, max_cache_size: int = 20):
        self.__underlying_storage = underlying_storage
        self.__cache = dict()
        self.__cache_ids = Queue()
        self.__max_cache_size = max_cache_size

    def get(self, name: str) -> torch.Tensor:
        cache_item = self.__get_cache(name)
        if cache_item is not None:
            return cache_item

        latent = self.__underlying_storage.get(name)
        self.__set_cache(name, latent)
        return latent

    def save(self, name: str, data: torch.Tensor) -> None:
        self.__underlying_storage.save(name, data)
        self.__set_cache(name, data)

    def delete(self, name: str) -> None:
        self.__underlying_storage.delete(name)
        if name in self.__cache:
            del self.__cache[name]

    def __get_cache(self, name: str) -> Optional[torch.Tensor]:
        return None if name not in self.__cache else self.__cache[name]

    def __set_cache(self, name: str, data: torch.Tensor):
        if name not in self.__cache:
            self.__cache[name] = data
            self.__cache_ids.put(name)
            # Evict the oldest cached entry (FIFO) once the cache grows past its limit.
            if self.__cache_ids.qsize() > self.__max_cache_size:
                self.__cache.pop(self.__cache_ids.get())


class DiskLatentsStorage(LatentsStorageBase):
    """Stores latents in a folder on disk without caching."""

    __output_folder: Path

    def __init__(self, output_folder: Union[str, Path]):
        self.__output_folder = output_folder if isinstance(output_folder, Path) else Path(output_folder)
        self.__output_folder.mkdir(parents=True, exist_ok=True)

    def get(self, name: str) -> torch.Tensor:
        latent_path = self.get_path(name)
        return torch.load(latent_path)

    def save(self, name: str, data: torch.Tensor) -> None:
        self.__output_folder.mkdir(parents=True, exist_ok=True)
        latent_path = self.get_path(name)
        torch.save(data, latent_path)

    def delete(self, name: str) -> None:
        latent_path = self.get_path(name)
        latent_path.unlink()

    def get_path(self, name: str) -> Path:
        return self.__output_folder / name
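

# Illustrative usage sketch (an assumption for demonstration, not part of the original
# module): the write-through cache can wrap the disk store so repeated reads of recent
# latents avoid hitting the filesystem. The temporary directory, entry name, and tensor
# shape below are arbitrary choices for the example.
if __name__ == "__main__":
    import tempfile

    with tempfile.TemporaryDirectory() as tmp_dir:
        storage = ForwardCacheLatentsStorage(DiskLatentsStorage(tmp_dir), max_cache_size=4)
        storage.save("example_latents", torch.zeros(1, 4, 64, 64))
        # Served from the in-memory cache; a cache miss would fall back to torch.load.
        assert storage.get("example_latents").shape == (1, 4, 64, 64)
        storage.delete("example_latents")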