Fixed embiggening crash due to clear_cuda_cache not being passed on and bad cuda stats initialization. (#2756)

Jonathan 2023-02-21 11:12:24 -06:00 committed by GitHub
parent 7fadd5e5c4
commit 3ab9d02883
3 changed files with 7 additions and 2 deletions
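
For context, the fix threads a cache-clearing callback from the top-level Generate object down into the nested tile generators. A minimal sketch of that pattern (the helper below is illustrative, not the exact InvokeAI code):

import torch

def clear_cuda_cache():
    # Return cached allocator blocks to the driver so the next large
    # allocation (e.g. an embiggen tile) is less likely to run out of memory.
    if torch.cuda.is_available():
        torch.cuda.empty_cache()

def generate(prompt, clear_cuda_cache=None, **kwargs):
    # Free memory left over from the previous generation before starting.
    if clear_cuda_cache is not None:
        clear_cuda_cache()
    # ... sampling would happen here ...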

@@ -178,7 +178,9 @@ class Generate:
self.model_hash = None
self.sampler = None
self.device = None
- self.session_peakmem = None
+ self.max_memory_allocated = 0
+ self.memory_allocated = 0
+ self.session_peakmem = 0
self.base_generator = None
self.seed = None
self.outdir = outdir
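
The single peak-memory counter previously started as None; because the later memory bookkeeping feeds these values into comparisons and sums, a None starting value fails on the first update. A small sketch of that failure mode (the bookkeeping line is an assumed shape, not code from this diff):

import torch

session_peakmem = 0  # new initialization: max() and += work from the first update
if torch.cuda.is_available():
    # With the old initialization (session_peakmem = None), this max() raised
    # TypeError: '>' not supported between instances of 'int' and 'NoneType'.
    session_peakmem = max(session_peakmem, torch.cuda.max_memory_allocated())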
@@ -781,6 +783,7 @@ class Generate:
embiggen_tiles=opt.embiggen_tiles,
embiggen_strength=opt.embiggen_strength,
image_callback=callback,
+ clear_cuda_cache=self.clear_cuda_cache,
)
elif tool == "outpaint":
from ldm.invoke.restoration.outpaint import Outpaint

@@ -126,7 +126,7 @@ class Generator:
seed = self.new_seed()
# Free up memory from the last generation.
- clear_cuda_cache = kwargs['clear_cuda_cache'] or None
+ clear_cuda_cache = kwargs['clear_cuda_cache'] if 'clear_cuda_cache' in kwargs else None
if clear_cuda_cache is not None:
clear_cuda_cache()
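
The one-line change above matters because kwargs['clear_cuda_cache'] or None raises KeyError whenever the caller never supplies the keyword at all, which is the likely crash on the embiggen path. A standalone illustration (function names here are generic, not the InvokeAI API):

def old_lookup(**kwargs):
    # Raises KeyError if 'clear_cuda_cache' was never passed in.
    return kwargs['clear_cuda_cache'] or None

def new_lookup(**kwargs):
    # Falls back to None when the keyword is absent; kwargs.get('clear_cuda_cache')
    # would behave the same way more compactly.
    return kwargs['clear_cuda_cache'] if 'clear_cuda_cache' in kwargs else None

# new_lookup()  -> None
# old_lookup()  -> KeyError: 'clear_cuda_cache'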

@@ -346,6 +346,7 @@ class Embiggen(Generator):
newinitimage = torch.from_numpy(newinitimage)
newinitimage = 2.0 * newinitimage - 1.0
newinitimage = newinitimage.to(self.model.device)
+ clear_cuda_cache = kwargs['clear_cuda_cache'] if 'clear_cuda_cache' in kwargs else None
tile_results = gen_img2img.generate(
prompt,
@@ -363,6 +364,7 @@ class Embiggen(Generator):
init_image = newinitimage, # notice that init_image is different from init_img
mask_image = None,
strength = strength,
+ clear_cuda_cache = clear_cuda_cache
)
emb_tile_store.append(tile_results[0][0])