Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -4,27 +4,28 @@ from huggingface_hub import HfApi
|
|
4 |
import spaces
|
5 |
|
6 |
@spaces.GPU
|
7 |
-
|
8 |
-
def write_repo(base_model, model_to_merge):
    """Write the base model and the model to merge to repo.txt, one per line.

    Args:
        base_model: Repo id/name of the base model (first line of repo.txt).
        model_to_merge: Repo id/name of the model to merge (second line).
    """
    # Bug fix: the original called repo.writelines(base_model, model_to_merge).
    # writelines() takes a single iterable and inserts no separators, so two
    # positional arguments raise TypeError. Write both names newline-separated.
    with open("repo.txt", "w") as repo:
        repo.write(base_model + "\n" + model_to_merge)
|
11 |
-
|
12 |
-
def merge_and_upload(weight_drop_prob, scaling_factor, repo_name, token):
|
13 |
# Define a fixed output path
|
14 |
output_path = "/home/user/project/output"
|
|
|
|
|
|
|
|
|
|
|
15 |
# Construct the command to run hf_merge.py
|
16 |
command = [
|
17 |
"python3", "hf_merge.py",
|
18 |
"-p", str(weight_drop_prob),
|
19 |
"-lambda", str(scaling_factor),
|
20 |
-
"repo.txt",
|
|
|
21 |
]
|
22 |
|
23 |
# Run the command and capture the output
|
24 |
result = subprocess.run(command, capture_output=True, text=True)
|
25 |
|
26 |
# Check if the merge was successful
|
27 |
-
if result.returncode
|
28 |
return f"Error in merging models: {result.stderr}"
|
29 |
|
30 |
# Upload the result to Hugging Face Hub
|
@@ -35,28 +36,25 @@ def merge_and_upload(weight_drop_prob, scaling_factor, repo_name, token):
|
|
35 |
|
36 |
# Upload the file
|
37 |
api.upload_file(
|
38 |
-
path_or_fileobj=output_path,
|
39 |
-
path_in_repo="merged_model.
|
40 |
repo_id=repo_name,
|
41 |
token=token
|
42 |
)
|
43 |
return f"Model merged and uploaded successfully to {repo_name}!"
|
44 |
except Exception as e:
|
45 |
-
return f"Error uploading to Hugging Face Hub: {str(e)}"
|
46 |
|
47 |
# Define the Gradio interface
|
48 |
iface = gr.Interface(
|
49 |
-
fn=write_repo,
|
50 |
-
inputs=[
|
51 |
-
gr.Textbox(label="Models"),
|
52 |
-
gr.Textbox(label="Model to Merge")
|
53 |
-
],
|
54 |
fn=merge_and_upload,
|
55 |
inputs=[
|
56 |
gr.Slider(minimum=0, maximum=1, value=0.3, label="Weight Drop Probability"),
|
57 |
gr.Number(value=3.0, label="Scaling Factor"),
|
58 |
gr.Textbox(label="Hugging Face Token", type="password"),
|
59 |
-
gr.Textbox(label="Repo Name")
|
|
|
|
|
60 |
],
|
61 |
outputs=gr.Textbox(label="Output"),
|
62 |
title="Model Merger and Uploader",
|
|
|
4 |
import spaces
|
5 |
|
6 |
@spaces.GPU
def merge_and_upload(weight_drop_prob, scaling_factor, repo_name, token, base_model, model_to_merge):
    """Merge two Hugging Face models via hf_merge.py and upload the result.

    Writes the two model names to repo.txt, shells out to hf_merge.py, and, on
    success, uploads the merged weights file to the Hub.

    Args:
        weight_drop_prob: DARE weight-drop probability passed as ``-p``.
        scaling_factor: Scaling factor passed as ``-lambda``.
        repo_name: Target Hub repo id for the upload.
        token: Hugging Face access token used for the upload.
        base_model: Repo id of the base model (first line of repo.txt).
        model_to_merge: Repo id of the model to merge (second line).

    Returns:
        A human-readable status string (success or error message).

    NOTE(review): the Gradio interface below lists the "Hugging Face Token"
    input BEFORE "Repo Name", but Gradio maps inputs to parameters
    positionally, so repo_name/token arrive swapped — confirm and align the
    input order with this signature.
    """
    # Define a fixed output path for the merged weights.
    output_path = "/home/user/project/output"

    # Write the base model and model to merge to the repo.txt file.
    with open("repo.txt", "w") as repo:
        repo.write(base_model + "\n" + model_to_merge)

    # Construct the command to run hf_merge.py.
    command = [
        "python3", "hf_merge.py",
        "-p", str(weight_drop_prob),
        "-lambda", str(scaling_factor),
        "-repo_file", "repo.txt",
        "-output_path", output_path
    ]

    # Run the command and capture the output.
    result = subprocess.run(command, capture_output=True, text=True)

    # Check if the merge was successful.
    if result.returncode != 0:
        return f"Error in merging models: {result.stderr}"

    # Upload the result to Hugging Face Hub.
    # NOTE(review): the diff view hides the original lines here; the visible
    # `except` and `api.upload_file(...)` imply a try block wrapping an
    # HfApi() client — reconstructed below, confirm against the full file.
    try:
        api = HfApi()
        # Upload the file.
        api.upload_file(
            path_or_fileobj=output_path + "/merged_model.safetensor",
            path_in_repo="merged_model.safetensor",
            repo_id=repo_name,
            token=token
        )
        return f"Model merged and uploaded successfully to {repo_name}!"
    except Exception as e:
        return f"Error uploading to Hugging Face Hub: {str(e)}"
|
47 |
|
48 |
# Define the Gradio interface
|
49 |
iface = gr.Interface(
|
|
|
|
|
|
|
|
|
|
|
50 |
fn=merge_and_upload,
|
51 |
inputs=[
|
52 |
gr.Slider(minimum=0, maximum=1, value=0.3, label="Weight Drop Probability"),
|
53 |
gr.Number(value=3.0, label="Scaling Factor"),
|
54 |
gr.Textbox(label="Hugging Face Token", type="password"),
|
55 |
+
gr.Textbox(label="Repo Name"),
|
56 |
+
gr.Textbox(label="Base Model"),
|
57 |
+
gr.Textbox(label="Model to Merge")
|
58 |
],
|
59 |
outputs=gr.Textbox(label="Output"),
|
60 |
title="Model Merger and Uploader",
|