Remove unused _lora_forward_hook(...).

This commit is contained in:
Ryan Dick 2023-10-19 15:20:13 -04:00 committed by Kent Keirsey
parent 7f4ce518b7
commit 2ba5b44ec4

View File

@ -54,24 +54,6 @@ class ModelPatcher:
return (module_key, module)
@staticmethod
def _lora_forward_hook(
    applied_loras: List[Tuple[LoRAModel, float]],
    layer_name: str,
):
    """Build a forward hook closure for the module registered under *layer_name*.

    The returned callable follows the ``(module, input, output)`` hook
    signature and adds each applicable LoRA layer's contribution
    (scaled by its weight) onto the module's original output.
    LoRA models that have no layer for ``layer_name`` are skipped.
    """

    def lora_forward(module, input_h, output):
        # Nothing to apply — hand back the untouched output.
        if not applied_loras:
            return output
        for lora_model, lora_weight in applied_loras:
            lora_layer = lora_model.layers.get(layer_name, None)
            if lora_layer is not None:
                # Accumulate this LoRA's contribution in place.
                output += lora_layer.forward(module, input_h, lora_weight)
        return output

    return lora_forward
@classmethod
@contextmanager
def apply_lora_unet(