From 9d103ef03045e5e1db0c012e1368b8eb5619a58b Mon Sep 17 00:00:00 2001
From: Lincoln Stein
Date: Thu, 29 Dec 2022 09:00:50 -0500
Subject: [PATCH] attempt to address memory issues when loading ckpt models
 (#2128)

- A couple of users have reported that switching back and forth between
  ckpt models is causing a "GPU out of memory" crash. Traceback suggests
  there is actually a CPU RAM issue.

- This speculative test simply performs a round of garbage collection
  before the point where the crash occurs.
---
 ldm/invoke/model_cache.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/ldm/invoke/model_cache.py b/ldm/invoke/model_cache.py
index 623160e67d..e09e5323fa 100644
--- a/ldm/invoke/model_cache.py
+++ b/ldm/invoke/model_cache.py
@@ -242,6 +242,9 @@ class ModelCache(object):
         # merged models from auto11 merge board are flat for some reason
         if 'state_dict' in sd:
             sd = sd['state_dict']
+
+        print(f' | Forcing garbage collection prior to loading new model')
+        gc.collect()
         model = instantiate_from_config(omega_config.model)
         model.load_state_dict(sd, strict=False)