fix infinite hang during GFPGAN processing inadvertently introduced during batch_size cleanup

Lincoln Stein 2022-08-31 08:18:44 -04:00
commit 462a1961e4
2 changed files with 3 additions and 1 deletion


@@ -30,3 +30,5 @@ dependencies:
   - -e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers
   - -e git+https://github.com/lstein/k-diffusion.git@master#egg=k-diffusion
   - -e .
+variables:
+  PYTORCH_ENABLE_MPS_FALLBACK: 1
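The new variables: block exports PYTORCH_ENABLE_MPS_FALLBACK=1 into the conda environment so that PyTorch falls back to the CPU for operators the Apple MPS backend has not implemented. A minimal sketch of the equivalent setting in plain Python follows; the device-selection snippet is illustrative and not code from this repository.

import os

# Setting the variable before torch is imported has the same effect as the
# conda `variables:` entry above: ops missing from the MPS backend fall back
# to the CPU instead of raising NotImplementedError.
os.environ.setdefault("PYTORCH_ENABLE_MPS_FALLBACK", "1")

import torch

# Illustrative device selection (assumption, not this repository's code).
device = torch.device("mps") if torch.backends.mps.is_available() else torch.device("cpu")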


@@ -124,7 +124,7 @@ class T2I:
             grid=False,
             width=512,
             height=512,
-            sampler_name='klms',
+            sampler_name='k_lms',
             latent_channels=4,
             downsampling_factor=8,
             ddim_eta=0.0,  # deterministic
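The only change in the second file is the default sampler name, 'klms' to 'k_lms', matching the naming used for the k-diffusion samplers. The sketch below is a hypothetical illustration of why an exact-match default matters; the table contents and function name are assumptions, not this repository's actual dispatch code.

# Hypothetical sampler lookup, for illustration only.
KNOWN_SAMPLERS = {
    'ddim':  'DDIMSampler',
    'plms':  'PLMSSampler',
    'k_lms': 'KSampler (LMS from k-diffusion)',
}

def resolve_sampler(sampler_name: str) -> str:
    # Failing loudly on an unknown name is safer than falling through silently.
    if sampler_name not in KNOWN_SAMPLERS:
        raise ValueError(
            f"unknown sampler {sampler_name!r}; expected one of {sorted(KNOWN_SAMPLERS)}"
        )
    return KNOWN_SAMPLERS[sampler_name]

print(resolve_sampler('k_lms'))  # matches the new default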