Commit 9c5fca7
Fix lora issue.
comfyanonymous committed Sep 8, 2024
1 parent a5da4d0 commit 9c5fca7
Showing 1 changed file with 7 additions and 5 deletions.
comfy/lora.py (12 changes: 7 additions & 5 deletions)
@@ -248,15 +248,17 @@ def model_lora_keys_clip(model, key_map={}):
     for k in sdk:
         if k.endswith(".weight"):
             if k.startswith("t5xxl.transformer."):#OneTrainer SD3 and Flux lora
+                l_key = k[len("t5xxl.transformer."):-len(".weight")]
                 t5_index = 1
-                if clip_l_present:
-                    t5_index += 1
                 if clip_g_present:
                     t5_index += 1
+                if clip_l_present:
+                    t5_index += 1
+                    if t5_index == 2:
+                        key_map["lora_te{}_{}".format(t5_index, l_key.replace(".", "_"))] = k #OneTrainer Flux
+                        t5_index += 1
 
-                l_key = k[len("t5xxl.transformer."):-len(".weight")]
-                lora_key = "lora_te{}_{}".format(t5_index, l_key.replace(".", "_"))
-                key_map[lora_key] = k
+                key_map["lora_te{}_{}".format(t5_index, l_key.replace(".", "_"))] = k
             elif k.startswith("hydit_clip.transformer.bert."): #HunyuanDiT Lora
                 l_key = k[len("hydit_clip.transformer.bert."):-len(".weight")]
                 lora_key = "lora_te1_{}".format(l_key.replace(".", "_"))