1 parent 048d6b5, commit af92bb2
vllm/lora/models.py
@@ -132,8 +132,6 @@ def from_lora_tensors(
         pin_memory = str(device) == "cpu" and is_pin_memory_available()
         loras: dict[str, LoRALayerWeights] = {}
         for tensor_name, tensor in tensors.items():
-            if "lm_head" in tensor_name:
-                pass
             module_name, is_lora_a, is_bias = parse_fine_tuned_lora_name(
                 tensor_name, weights_mapper)
             if module_name not in loras:
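For context, the two deleted lines were dead code: an `if "lm_head" in tensor_name:` branch whose only body was `pass`, so it matched lm_head tensors and then did nothing before falling through to `parse_fine_tuned_lora_name`. The following minimal, self-contained sketch (hypothetical tensor names, not vLLM code) illustrates why removing such a branch leaves the loop's behavior unchanged:

```python
# Standalone illustration only: an `if` whose entire body is `pass` has no
# effect, so deleting it does not change which tensors the loop processes.
tensors = {
    "base_model.model.lm_head.lora_A.weight": object(),           # hypothetical names
    "base_model.model.layers.0.q_proj.lora_A.weight": object(),
}

seen_before, seen_after = [], []

# Before the change: the lm_head check matched and then did nothing.
for tensor_name in tensors:
    if "lm_head" in tensor_name:
        pass
    seen_before.append(tensor_name)

# After the change: the dead branch is gone; every tensor is still visited.
for tensor_name in tensors:
    seen_after.append(tensor_name)

assert seen_before == seen_after
```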