Spaces: Runtime error
adamtappis committed · 07fe581
1 Parent(s): ccc0d10
Update app.py
app.py
CHANGED
@@ -1,35 +1,45 @@
-import torch
-from peft import PeftModel, PeftConfig
-from transformers import AutoModelForCausalLM, AutoTokenizer
-from IPython.display import display, Markdown
-
-peft_model_id = f"adamtappis/marketing_emails_model"
-config = PeftConfig.from_pretrained(peft_model_id)
-model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, return_dict=True, load_in_8bit=False)
-tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
-
-# Load the Lora model
-model = PeftModel.from_pretrained(model, peft_model_id)
-
-def make_inference(product, description):
-    batch = tokenizer(f"### INSTRUCTION\nBelow is a product and description, please write a marketing email for this product.\n\n### Product:\n{product}\n### Description:\n{description}\n\n### Marketing Email:\n", return_tensors='pt')
-
-    with torch.cuda.amp.autocast():
-        output_tokens = model.generate(**batch, max_new_tokens=200)
-
-    display(Markdown((tokenizer.decode(output_tokens[0], skip_special_tokens=True))))
-
-if __name__ == "__main__":
-    # make a gradio interface
-    import gradio as gr
-
-    gr.Interface(
-        make_inference,
-        [
-            gr.inputs.Textbox(lines=1, label="Product Name"),
-            gr.inputs.Textbox(lines=1, label="Product Description"),
-        ],
-        gr.outputs.Textbox(label="Email"),
-        title="🗣️Marketing Email Generator📄",
-        description="🗣️Marketing Email Generator📄 is a tool that allows you to generate marketing emails for different products",
-    ).launch()
+# import torch
+# from peft import PeftModel, PeftConfig
+# from transformers import AutoModelForCausalLM, AutoTokenizer
+# from IPython.display import display, Markdown
+
+# peft_model_id = f"adamtappis/marketing_emails_model"
+# config = PeftConfig.from_pretrained(peft_model_id)
+# model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, return_dict=True, load_in_8bit=False)
+# tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
+
+# Load the Lora model
+# model = PeftModel.from_pretrained(model, peft_model_id)
+
+# def make_inference(product, description):
+#     batch = tokenizer(f"### INSTRUCTION\nBelow is a product and description, please write a marketing email for this product.\n\n### Product:\n{product}\n### Description:\n{description}\n\n### Marketing Email:\n", return_tensors='pt')
+#
+#     with torch.cuda.amp.autocast():
+#         output_tokens = model.generate(**batch, max_new_tokens=200)
+#
+#     display(Markdown((tokenizer.decode(output_tokens[0], skip_special_tokens=True))))
+
+import gradio as gr
+from transformers import pipeline
+pipe = pipeline("Marketing", model="adamtappis/marketing_emails_model")
+demo = gr.Interface.from_pipeline(pipe)
+demo.launch()
+
+# def predict(text):
+#     return pipe(text)[0]["translation_text"]
+
+
+# if __name__ == "__main__":
+#     # make a gradio interface
+#     import gradio as gr
+#
+#     gr.Interface(
+#         make_inference,
+#         [
+#             gr.inputs.Textbox(lines=1, label="Product Name"),
+#             gr.inputs.Textbox(lines=1, label="Product Description"),
+#         ],
+#         gr.outputs.Textbox(label="Email"),
+#         title="🗣️Marketing Email Generator📄",
+#         description="🗣️Marketing Email Generator📄 is a tool that allows you to generate marketing emails for different products",
+#     ).launch()
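
Note on the Space's "Runtime error" status: "Marketing" is not one of the built-in transformers pipeline tasks, so pipeline("Marketing", model=...) fails with an unknown-task error at startup, before the Gradio interface ever launches. Below is a minimal sketch (not the committed code) of an app.py that keeps the commented-out PEFT loading path and wraps the original make_inference prompt in a Gradio interface. It assumes the adamtappis/marketing_emails_model adapter loads on CPU and swaps torch.cuda.amp.autocast for torch.no_grad so it also runs without a GPU.

# Sketch only: reuses the removed PEFT + Gradio code from this diff; CPU assumed.
import torch
import gradio as gr
from peft import PeftModel, PeftConfig
from transformers import AutoModelForCausalLM, AutoTokenizer

peft_model_id = "adamtappis/marketing_emails_model"
config = PeftConfig.from_pretrained(peft_model_id)

# Load the base model and tokenizer, then attach the LoRA adapter weights.
model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, return_dict=True)
tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
model = PeftModel.from_pretrained(model, peft_model_id)

def make_inference(product, description):
    # Build the same instruction prompt used in the removed code and generate a reply.
    prompt = (
        "### INSTRUCTION\nBelow is a product and description, "
        "please write a marketing email for this product.\n\n"
        f"### Product:\n{product}\n### Description:\n{description}\n\n### Marketing Email:\n"
    )
    batch = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        output_tokens = model.generate(**batch, max_new_tokens=200)
    return tokenizer.decode(output_tokens[0], skip_special_tokens=True)

# Return the generated email as text instead of displaying Markdown in a notebook.
demo = gr.Interface(
    make_inference,
    [
        gr.Textbox(lines=1, label="Product Name"),
        gr.Textbox(lines=1, label="Product Description"),
    ],
    gr.Textbox(label="Email"),
    title="Marketing Email Generator",
)

if __name__ == "__main__":
    demo.launch()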