multimodalart HF staff committed on
Commit
5ae33c3
·
1 Parent(s): 3424ad0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -3
app.py CHANGED
@@ -166,11 +166,8 @@ def run_lora(prompt, negative, lora_scale, selected_state, sdxl_loras, sdxl_lora
166
  loaded_state_dict = copy.deepcopy(state_dicts[repo_name]["state_dict"])
167
  cross_attention_kwargs = None
168
  if last_lora != repo_name:
169
- #if(last_fused):
170
- #pipe.unfuse_lora()
171
  pipe.load_lora_weights(loaded_state_dict, adapter_name=sdxl_loras[selected_state.index]["repo"])
172
  pipe.set_adapters([sdxl_loras[selected_state.index]["repo"], "lcm_lora"], adapter_weights=[0.8, 1.0])
173
- #last_fused = True
174
  is_pivotal = sdxl_loras[selected_state.index]["is_pivotal"]
175
  if(is_pivotal):
176
  #Add the textual inversion embeddings from pivotal tuning models
 
166
  loaded_state_dict = copy.deepcopy(state_dicts[repo_name]["state_dict"])
167
  cross_attention_kwargs = None
168
  if last_lora != repo_name:
 
 
169
  pipe.load_lora_weights(loaded_state_dict, adapter_name=sdxl_loras[selected_state.index]["repo"])
170
  pipe.set_adapters([sdxl_loras[selected_state.index]["repo"], "lcm_lora"], adapter_weights=[0.8, 1.0])
 
171
  is_pivotal = sdxl_loras[selected_state.index]["is_pivotal"]
172
  if(is_pivotal):
173
  #Add the textual inversion embeddings from pivotal tuning models