multiple fixes in response to preflight testing bug reports

- updated environment-mac.yml #932
- use the upstream GFPGAN library now that the color-changing issues have been fixed
  and facial recognition has improved #905
- preload_models fixed to download additional models needed by gfpgan
Lincoln Stein 2022-10-05 12:44:16 -04:00
parent 488890e6bb
commit 4d5b208601
6 changed files with 64 additions and 49 deletions

.gitignore
View File

@@ -196,3 +196,6 @@ checkpoints
.vscode/
gfpgan/
models/ldm/stable-diffusion-v1/model.sha256
# GFPGAN model files
gfpgan/

environment-mac.yml
View File

@@ -3,12 +3,12 @@ channels:
- pytorch
- conda-forge
dependencies:
- python==3.10.5
- python==3.9.13
- pip==22.2.2
# pytorch left unpinned
- pytorch
- torchvision
- pytorch==1.12.1
- torchvision==0.13.1
# I suggest keeping the other deps sorted for convenience.
# To determine what the latest versions should be, run:
@@ -27,13 +27,12 @@ dependencies:
- imgaug==0.4.0
- kornia==0.6.7
- mpmath==1.2.1
- nomkl
- nomkl=1.0
- numpy==1.23.2
- omegaconf==2.1.1
- openh264==2.3.0
- onnx==1.12.0
- onnxruntime==1.12.1
- protobuf==3.19.4
- pudb==2022.1
- pytorch-lightning==1.7.5
- scipy==1.9.1
@@ -46,9 +45,9 @@ dependencies:
- flask_socketio==5.3.0
- flask_cors==3.0.10
- dependency_injector==4.40.0
- eventlet
- eventlet==0.33.1
- opencv-python==4.6.0
- protobuf==3.20.1
- protobuf==3.19.5
- realesrgan==0.2.5.0
- send2trash==1.8.0
- test-tube==0.7.5
@@ -57,7 +56,7 @@ dependencies:
- -e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers
- -e git+https://github.com/openai/CLIP.git@main#egg=clip
- -e git+https://github.com/Birch-san/k-diffusion.git@mps#egg=k_diffusion
- -e git+https://github.com/lstein/GFPGAN@fix-dark-cast-images#egg=gfpgan
- -e git+https://github.com/TencentARC/GFPGAN.git#egg=gfpgan
- -e .
variables:
PYTORCH_ENABLE_MPS_FALLBACK: 1
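
Note: pytorch and torchvision are now pinned (1.12.1 / 0.13.1) instead of floating. A minimal sanity check, assuming the conda environment above has been created and activated (this snippet is illustrative and not part of the repository):

import os
import torch
import torchvision

print(torch.__version__)        # expected to start with 1.12.1
print(torchvision.__version__)  # expected to start with 0.13.1
# PYTORCH_ENABLE_MPS_FALLBACK=1 lets ops missing on MPS fall back to the CPU
print(os.environ.get('PYTORCH_ENABLE_MPS_FALLBACK'))
print(torch.backends.mps.is_available())  # True on Apple-silicon builds of torch >= 1.12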

View File

@@ -36,5 +36,5 @@ dependencies:
- -e git+https://github.com/openai/CLIP.git@main#egg=clip
- -e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers
- -e git+https://github.com/Birch-san/k-diffusion.git@mps#egg=k_diffusion
- -e git+https://github.com/lstein/GFPGAN@fix-dark-cast-images#egg=gfpgan
- -e git+https://github.com/TencentARC/GFPGAN.git#egg=gfpgan
- -e .

View File

@@ -20,6 +20,6 @@ torchmetrics==0.6.0
transformers==4.19.2
-e git+https://github.com/openai/CLIP.git@main#egg=clip
-e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers
git+https://github.com/lstein/k-diffusion.git@master#egg=k-diffusion
-e git+https://github.com/lstein/GFPGAN@fix-dark-cast-images#egg=gfpgan
-e git+https://github.com/lstein/k-diffusion.git@master#egg=k-diffusion
-e git+https://github.com/TencentARC/GFPGAN.git#egg=gfpgan
-e .

View File

@@ -33,4 +33,4 @@ dependency_injector==4.40.0
eventlet
git+https://github.com/openai/CLIP.git@main#egg=clip
git+https://github.com/Birch-san/k-diffusion.git@mps#egg=k-diffusion
git+https://github.com/lstein/GFPGAN@fix-dark-cast-images#egg=gfpgan
git+https://github.com/TencentARC/GFPGAN.git#egg=gfpgan
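
All of the environment and requirements files now install GFPGAN from the upstream TencentARC repository instead of the lstein fork. A quick check that the intended package resolved, assuming the environment is installed (GFPGANer is the restorer class the package exports; the snippet is illustrative only):

import gfpgan
from gfpgan import GFPGANer

print(gfpgan.__file__)   # editable installs typically resolve under src/gfpgan
print(GFPGANer)          # the class used for face restoration elsewhere in the repo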

preload_models.py
View File

@@ -15,13 +15,13 @@ import urllib.request
transformers.logging.set_verbosity_error()
# this will preload the Bert tokenizer files
print('preloading bert tokenizer...')
print('preloading bert tokenizer...', end='')
tokenizer = BertTokenizerFast.from_pretrained('bert-base-uncased')
print('...success')
# this will download requirements for Kornia
print('preloading Kornia requirements (ignore the deprecation warnings)...')
print('preloading Kornia requirements...', end='')
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
import kornia
@@ -29,12 +29,12 @@ print('...success')
version = 'openai/clip-vit-large-patch14'
print('preloading CLIP model (Ignore the deprecation warnings)...')
print('preloading CLIP model...',end='')
sys.stdout.flush()
tokenizer = CLIPTokenizer.from_pretrained(version)
transformer = CLIPTextModel.from_pretrained(version)
print('\n\n...success')
print('...success')
# In the event that the user has installed GFPGAN and also elected to use
# RealESRGAN, this will attempt to download the model needed by RealESRGANer
@@ -47,7 +47,7 @@ except ModuleNotFoundError:
pass
if gfpgan:
print('Loading models from RealESRGAN and facexlib')
print('Loading models from RealESRGAN and facexlib...',end='')
try:
from realesrgan.archs.srvgg_arch import SRVGGNetCompact
from facexlib.utils.face_restoration_helper import FaceRestoreHelper
@@ -65,28 +65,41 @@ if gfpgan:
print('Error loading ESRGAN:')
print(traceback.format_exc())
try:
print('Loading models from GFPGAN')
import urllib.request
model_url = 'https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.4.pth'
model_dest = 'src/gfpgan/experiments/pretrained_models/GFPGANv1.4.pth'
for model in (
[
'https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.4.pth',
'src/gfpgan/experiments/pretrained_models/GFPGANv1.4.pth'
],
[
'https://github.com/xinntao/facexlib/releases/download/v0.1.0/detection_Resnet50_Final.pth',
'./gfpgan/weights/detection_Resnet50_Final.pth'
],
[
'https://github.com/xinntao/facexlib/releases/download/v0.2.2/parsing_parsenet.pth',
'./gfpgan/weights/parsing_parsenet.pth'
],
):
model_url,model_dest = model
try:
if not os.path.exists(model_dest):
print('downloading gfpgan model file...')
os.makedirs(os.path.dirname(model_dest))
print(f'Downloading gfpgan model file {model_url}...',end='')
os.makedirs(os.path.dirname(model_dest), exist_ok=True)
urllib.request.urlretrieve(model_url,model_dest)
print('...success')
except Exception:
import traceback
print('Error loading GFPGAN:')
print(traceback.format_exc())
print('...success')
print('preloading CodeFormer model file...')
print('preloading CodeFormer model file...',end='')
try:
import urllib.request
model_url = 'https://github.com/sczhou/CodeFormer/releases/download/v0.1.0/codeformer.pth'
model_dest = 'ldm/dream/restoration/codeformer/weights/codeformer.pth'
if not os.path.exists(model_dest):
print('downloading codeformer model file...')
print('Downloading codeformer model file...')
os.makedirs(os.path.dirname(model_dest), exist_ok=True)
urllib.request.urlretrieve(model_url,model_dest)
except Exception:
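
For reference, the download-if-missing pattern that the new preload code applies to the GFPGAN, facexlib, and CodeFormer weights can be sketched in isolation. This is a simplified illustration rather than the exact script; the URL and destination below are copied from the diff above:

import os
import urllib.request

def fetch_if_missing(model_url, model_dest):
    # Download a weight file only if it is not already on disk.
    if os.path.exists(model_dest):
        return
    print(f'Downloading {model_url}...', end='')
    os.makedirs(os.path.dirname(model_dest), exist_ok=True)
    urllib.request.urlretrieve(model_url, model_dest)
    print('...success')

fetch_if_missing(
    'https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.4.pth',
    'src/gfpgan/experiments/pretrained_models/GFPGANv1.4.pth',
)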