shivanikerai committed
Commit f016ab0 · verified · 1 Parent(s): 495346b

Update app.py

Files changed (1)
  1. app.py +15 -14
app.py CHANGED
@@ -1,13 +1,13 @@
 import gradio as gr
-import requests
-# from transformers import pipeline
-# pipe = pipeline("text-generation", model="shivanikerai/TinyLlama-1.1B-Chat-v1.0-seo-optimised-title-suggestion-v1.0")
-API_URL = "https://api-inference.huggingface.co/models/shivanikerai/TinyLlama-1.1B-Chat-v1.0-seo-optimised-title-suggestion-v1.0"
-def query(payload, api_token):
-    response = requests.post(API_URL, headers={"Authorization": f"Bearer {api_token}"}, json=payload)
-    return response.json()
+# import requests
+from transformers import pipeline
+pipe = pipeline("text-generation", model="shivanikerai/TinyLlama-1.1B-Chat-v1.0-seo-optimised-title-suggestion-v1.0")
+# API_URL = "https://api-inference.huggingface.co/models/shivanikerai/TinyLlama-1.1B-Chat-v1.0-seo-optimised-title-suggestion-v1.0"
+# def query(payload, api_token):
+#     response = requests.post(API_URL, headers={"Authorization": f"Bearer {api_token}"}, json=payload)
+#     return response.json()
 
-def my_function(api_token, keywords, product_info):
+def my_function(keywords, product_info):
     B_SYS, E_SYS = "<<SYS>>", "<</SYS>>"
     B_INST, E_INST = "[INST]", "[/INST]"
     B_in, E_in = "[Product Details]", "[/Product Details]"
@@ -15,11 +15,11 @@ def my_function(api_token, keywords, product_info):
     prompt = f"""{B_INST} {B_SYS} You are a helpful, respectful and honest assistant for ecommerce product title creation. {E_SYS}
 Create a SEO optimized e-commerce product title for the keywords:{keywords.strip()}
 {B_in}{product_info}{E_in}\n{E_INST}\n\n{B_out}"""
-    # predictions = pipe(prompt)
-    # output=((predictions[0]['generated_text']).split(B_out)[-1]).strip()
-    output = query({
-        "inputs": prompt,
-    },api_token)
+    predictions = pipe(prompt)
+    output=((predictions[0]['generated_text']).split(B_out)[-1]).strip()
+    # output = query({
+    #     "inputs": prompt,
+    # },api_token)
     return (output)
 
 # Process the inputs (e.g., concatenate strings, perform calculations)
@@ -28,7 +28,8 @@ def my_function(api_token, keywords, product_info):
 
 # Create the Gradio interface
 interface = gr.Interface(fn=my_function,
-                         inputs=["text", "text", "text"],
+                         inputs=["text", "text"],
+                         # inputs=["text", "text", "text"],
                          outputs="text",
                          title="SEO Optimised Title Suggestion",
                          description="Enter Keywords and Product Info:")
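
After this commit the Space loads the model locally with a transformers text-generation pipeline instead of calling the hosted Inference API, so the API-token textbox is dropped and my_function takes only keywords and product_info. A minimal sketch of the new generate-then-extract path is below; B_out is defined on a line of app.py outside this diff, so the "[Title]" value used here is only a hypothetical placeholder, not the Space's actual delimiter.

# Sketch of the updated flow. B_out is assumed to be the output delimiter
# defined earlier in app.py; "[Title]" is a hypothetical placeholder.
from transformers import pipeline

pipe = pipeline(
    "text-generation",
    model="shivanikerai/TinyLlama-1.1B-Chat-v1.0-seo-optimised-title-suggestion-v1.0",
)

B_out = "[Title]"  # hypothetical placeholder for the delimiter used in app.py
prompt = (
    "[INST] <<SYS>> You are a helpful, respectful and honest assistant for "
    "ecommerce product title creation. <</SYS>>\n"
    "Create a SEO optimized e-commerce product title for the keywords: summer dress\n"
    "[Product Details]cotton, knee length[/Product Details]\n[/INST]\n\n" + B_out
)

predictions = pipe(prompt)
# The pipeline echoes the prompt by default, so everything after the last
# B_out marker is the newly generated title.
title = predictions[0]["generated_text"].split(B_out)[-1].strip()
print(title)

Dropping the api_token argument keeps the function signature in step with the reduced Gradio inputs list (["text", "text"]), so the two remaining textboxes map to keywords and product_info.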