diff --git a/invokeai/backend/model_manager/load/optimizations.py b/invokeai/backend/model_manager/load/optimizations.py
index 030fcfa639..ccc5ff5513 100644
--- a/invokeai/backend/model_manager/load/optimizations.py
+++ b/invokeai/backend/model_manager/load/optimizations.py
@@ -17,7 +17,7 @@ def skip_torch_weight_init() -> Generator[None, None, None]:
     completely unnecessary if the intent is to load checkpoint weights from disk for the layer. This context manager
     monkey-patches common torch layers to skip the weight initialization step.
     """
-    torch_modules = [torch.nn.Linear, torch.nn.modules.conv._ConvNd, torch.nn.Embedding]
+    torch_modules = [torch.nn.Linear, torch.nn.modules.conv._ConvNd, torch.nn.Embedding, torch.nn.LayerNorm]
     saved_functions = [hasattr(m, "reset_parameters") and m.reset_parameters for m in torch_modules]
 
     try:
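
Below is a minimal sketch of how the full skip_torch_weight_init() context manager plausibly reads after this change. Only the torch_modules and saved_functions lines are visible in the hunk; the patch/restore body and the usage snippet are assumptions reconstructed from the docstring's description of the monkey-patching approach, not the verbatim upstream implementation.

    from contextlib import contextmanager
    from typing import Generator

    import torch


    @contextmanager
    def skip_torch_weight_init() -> Generator[None, None, None]:
        # Classes whose reset_parameters we want to no-op while loading
        # checkpoint weights; torch.nn.LayerNorm is the addition in this diff.
        torch_modules = [torch.nn.Linear, torch.nn.modules.conv._ConvNd, torch.nn.Embedding, torch.nn.LayerNorm]
        saved_functions = [hasattr(m, "reset_parameters") and m.reset_parameters for m in torch_modules]

        try:
            for torch_module in torch_modules:
                # Assumed patch step: replace the initializer with a no-op.
                # Layers built inside the context still allocate their
                # parameter tensors, but skip filling them with init values.
                torch_module.reset_parameters = lambda self: None
            yield None
        finally:
            # Assumed restore step: put the original initializers back on
            # exit, even if the body raised.
            for torch_module, saved_function in zip(torch_modules, saved_functions):
                if saved_function:
                    torch_module.reset_parameters = saved_function

Usage follows directly from the docstring's intent: construct the model with initialization skipped, then overwrite the uninitialized parameters from disk ("weights.pt" is a hypothetical path used for illustration).

    with skip_torch_weight_init():
        layer = torch.nn.LayerNorm(4096)  # reset_parameters is a no-op here
    layer.load_state_dict(torch.load("weights.pt"))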