"""
|
|
|
|
This module defines a singleton object, "safety_checker" that
|
|
|
|
wraps the safety_checker model. It respects the global "nsfw_checker"
|
|
|
|
configuration variable, that allows the checker to be supressed.
|
|
|
|
"""

import numpy as np
from PIL import Image

import invokeai.backend.util.logging as logger
from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.backend import SilenceWarnings
from invokeai.backend.util.devices import choose_torch_device

config = InvokeAIAppConfig.get_config()

CHECKER_PATH = "core/convert/stable-diffusion-safety-checker"


class SafetyChecker:
    """
    Lazy-loading wrapper around the Stable Diffusion safety checker model.
    """

    safety_checker = None
    feature_extractor = None
    tried_load: bool = False

    @classmethod
    def _load_safety_checker(cls):
        # Lazy initialization: attempt the (slow) model load at most once,
        # whether or not it succeeds.
        if cls.tried_load:
            return

        if config.nsfw_checker:
            try:
                from diffusers.pipelines.stable_diffusion.safety_checker import StableDiffusionSafetyChecker
                from transformers import AutoFeatureExtractor

                cls.safety_checker = StableDiffusionSafetyChecker.from_pretrained(config.models_path / CHECKER_PATH)
                cls.feature_extractor = AutoFeatureExtractor.from_pretrained(config.models_path / CHECKER_PATH)
                logger.info("NSFW checker initialized")
            except Exception as e:
                logger.warning(f"Could not load NSFW checker: {str(e)}")
        else:
            logger.info("NSFW checker loading disabled")
        cls.tried_load = True

    @classmethod
    def safety_checker_available(cls) -> bool:
        cls._load_safety_checker()
        return cls.safety_checker is not None

    @classmethod
    def has_nsfw_concept(cls, image: Image.Image) -> bool:
        # If the checker is suppressed or failed to load, treat every image
        # as safe rather than blocking output.
        if not cls.safety_checker_available():
            return False

        device = choose_torch_device()
        features = cls.feature_extractor([image], return_tensors="pt")
        features.to(device)
        cls.safety_checker.to(device)
        # Convert the PIL image to a normalized NCHW float array: the checker
        # expects batched, channel-first input in [0, 1].
        x_image = np.array(image).astype(np.float32) / 255.0
        x_image = x_image[None].transpose(0, 3, 1, 2)
        with SilenceWarnings():
            checked_image, has_nsfw_concept = cls.safety_checker(images=x_image, clip_input=features.pixel_values)
        return has_nsfw_concept[0]
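

# A minimal usage sketch (illustrative only, not part of the original module):
# "sample.png" is a hypothetical path, and an initialized InvokeAI install is
# assumed so the checker weights resolve under config.models_path / CHECKER_PATH.
if __name__ == "__main__":
    img = Image.open("sample.png").convert("RGB")
    if SafetyChecker.safety_checker_available():
        print("has_nsfw_concept:", SafetyChecker.has_nsfw_concept(img))
    else:
        print("NSFW checker disabled or unavailable; images pass through unchecked.")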