@@ -200,6 +200,13 @@ def model_lora_keys_clip(model, key_map={}):
                lora_key = "lora_prior_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #cascade lora: TODO put lora key prefix in the model config
                key_map[lora_key] = k

    k = "clip_g.text_projection"
    if k in sdk:
        key_map["lora_prior_te_text_projection"] = k #cascade lora
        # key_map["text_encoder.text_projection"] = k #TODO: check if other lora have the text_projection too
        # key_map["lora_te_text_projection"] = k

    return key_map


def model_lora_keys_unet(model, key_map={}):
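Note on how entries like key_map["lora_prior_te_text_projection"] get consumed: the sketch below is illustrative only, not this project's actual loader. It assumes the common kohya-style ".lora_up.weight" / ".lora_down.weight" key suffixes in the LoRA file; apply_lora, merge_lora_weight, and all tensor shapes in the example are hypothetical.

import torch

def merge_lora_weight(weight, up, down, alpha=1.0):
    # hypothetical helper: standard low-rank update W' = W + alpha * (up @ down)
    return weight + alpha * (up @ down).reshape(weight.shape)

def apply_lora(model_sd, lora_sd, key_map, alpha=1.0):
    # hypothetical loader: resolve LoRA key names to model weight names via key_map
    patched = dict(model_sd)
    for lora_key, model_key in key_map.items():
        up = lora_sd.get("{}.lora_up.weight".format(lora_key))
        down = lora_sd.get("{}.lora_down.weight".format(lora_key))
        if up is None or down is None:
            continue  # this LoRA does not patch that weight
        patched[model_key] = merge_lora_weight(model_sd[model_key], up, down, alpha)
    return patched

# Example: a cascade LoRA entry using the "lora_prior_te_" prefix maps onto the
# text projection weight named in the hunk above (shapes are made up for the demo).
key_map = {"lora_prior_te_text_projection": "clip_g.text_projection"}
model_sd = {"clip_g.text_projection": torch.zeros(1280, 1280)}
lora_sd = {
    "lora_prior_te_text_projection.lora_up.weight": torch.randn(1280, 8),
    "lora_prior_te_text_projection.lora_down.weight": torch.randn(8, 1280),
}
patched = apply_lora(model_sd, lora_sd, key_map)
print(patched["clip_g.text_projection"].shape)  # torch.Size([1280, 1280])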