From 5d411574041d00ea5decaf81134d6b3b0d68ca62 Mon Sep 17 00:00:00 2001
From: Ryan Dick
Date: Thu, 4 Apr 2024 17:15:05 -0400
Subject: [PATCH] Add LayerNorm to list of modules optimized by
 skip_torch_weight_init()

---
 invokeai/backend/model_manager/load/optimizations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/invokeai/backend/model_manager/load/optimizations.py b/invokeai/backend/model_manager/load/optimizations.py
index 030fcfa639..ccc5ff5513 100644
--- a/invokeai/backend/model_manager/load/optimizations.py
+++ b/invokeai/backend/model_manager/load/optimizations.py
@@ -17,7 +17,7 @@ def skip_torch_weight_init() -> Generator[None, None, None]:
     completely unnecessary if the intent is to load checkpoint weights from disk for the layer.
     This context manager monkey-patches common torch layers to skip the weight initialization step.
     """
-    torch_modules = [torch.nn.Linear, torch.nn.modules.conv._ConvNd, torch.nn.Embedding]
+    torch_modules = [torch.nn.Linear, torch.nn.modules.conv._ConvNd, torch.nn.Embedding, torch.nn.LayerNorm]
     saved_functions = [hasattr(m, "reset_parameters") and m.reset_parameters for m in torch_modules]
 
     try:
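
For context, the patched context manager works by temporarily replacing the reset_parameters method on each listed layer class, so layers constructed inside the context skip random weight initialization that would be overwritten by a checkpoint load anyway. Below is a minimal illustrative sketch of that idea, not the actual InvokeAI implementation; the function name skip_weight_init_sketch and the usage at the bottom are assumptions for demonstration.

# Illustrative sketch only (not the actual InvokeAI implementation): skip weight
# initialization by temporarily replacing reset_parameters on the patched layer classes.
from contextlib import contextmanager
from typing import Generator

import torch


@contextmanager
def skip_weight_init_sketch() -> Generator[None, None, None]:
    torch_modules = [torch.nn.Linear, torch.nn.modules.conv._ConvNd, torch.nn.Embedding, torch.nn.LayerNorm]
    saved_functions = [m.reset_parameters for m in torch_modules]
    try:
        for m in torch_modules:
            # Replace the class-level initializer with a no-op; layers built inside
            # the context keep their freshly allocated (uninitialized) tensors.
            m.reset_parameters = lambda self, *args, **kwargs: None
        yield
    finally:
        # Restore the original initializers so later layer construction is unaffected.
        for m, fn in zip(torch_modules, saved_functions):
            m.reset_parameters = fn


# Usage: weights constructed here are left uninitialized and are expected to be
# overwritten by a subsequent load_state_dict() call.
with skip_weight_init_sketch():
    layer = torch.nn.Linear(4096, 4096)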