Remove line that was intended to save memory, but wasn't actually having any effect.

Ryan Dick 2024-04-04 11:29:32 -04:00
parent 4af258615f
commit 6e4de001f1

@@ -544,12 +544,8 @@ class LoRAModelRaw(RawModel):  # (torch.nn.Module):
         for layer_key, values in state_dict.items():
             layer = layer_cls(layer_key, values)
-            # lower memory consumption by removing already parsed layer values
-            state_dict[layer_key].clear()
             layer.to(device=device, dtype=dtype)
             model.layers[layer_key] = layer
         return model

     @staticmethod
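
The commit does not spell out why clearing the entry had no effect, but one plausible reading is that the freshly constructed layer still holds references to the parsed tensors, so emptying the state_dict entry cannot release their storage while the layer is alive. Below is a minimal sketch of that behaviour; DummyLayer, the key "example_key", and the tensor shapes are hypothetical stand-ins, not the InvokeAI code.

import torch

# Hypothetical stand-in for a parsed LoRA layer that, like the real layer
# classes, keeps references to the tensors it was constructed from.
class DummyLayer:
    def __init__(self, values: dict[str, torch.Tensor]):
        self.up = values["lora_up.weight"]
        self.down = values["lora_down.weight"]

state_dict = {
    "example_key": {
        "lora_up.weight": torch.randn(320, 4),
        "lora_down.weight": torch.randn(4, 320),
    }
}

layer = DummyLayer(state_dict["example_key"])

# Clearing the inner dict only drops that dict's references; the tensors
# stay allocated because `layer` still points at them, so no memory is saved.
state_dict["example_key"].clear()
assert layer.up.shape == (320, 4)  # tensor storage is still resident

Clearing would only help if the state_dict held the last remaining references to those tensors, which is not the case here, so dropping the line simplifies the loop without changing memory behaviour.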