hereoncollab committed on
Commit
279e516
·
verified ·
1 Parent(s): b6144f6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -10
app.py CHANGED
@@ -1,23 +1,24 @@
1
  import gradio as gr
2
  from transformers import pipeline
3
 
4
-
5
- pipe = pipeline("text-generation", model="distilgpt2")
6
 
7
  def generate_response(user_input):
8
- formatted_input = f"Human: {user_input}\nAI:"
9
- response = pipe(formatted_input, max_length=150, num_return_sequences=1)
 
10
  generated_text = response[0]['generated_text']
11
- ai_response = generated_text.split('AI:')[-1].strip()
12
- return ai_response
13
-
14
 
 
15
  interface = gr.Interface(
16
  fn=generate_response,
17
- inputs=gr.Textbox(label="shitty mesage:", lines=2, placeholder="Type your message here..."),
18
  outputs="text",
19
- title="distilgpt2",
20
- description="distel gppt 20"
21
  )
22
 
 
23
  interface.launch()
 
1
  import gradio as gr
2
  from transformers import pipeline
3
 
4
# Load a Hugging Face text-generation pipeline backed by Google's
# Gemma 2-2B instruction-tuned checkpoint. Created once at import time
# so the model is downloaded/initialized a single time per process.
pipe = pipeline("text-generation", model="google/gemma-2-2b-it")
6
 
7
def generate_response(user_input):
    """Generate a model continuation for ``user_input`` and return only the new text.

    Args:
        user_input: The prompt string typed by the user in the Gradio textbox.

    Returns:
        The model-generated continuation, with the echoed prompt removed
        and surrounding whitespace stripped.
    """
    # max_length counts prompt + generated tokens (kept from the original call);
    # num_return_sequences=1 requests a single candidate.
    response = pipe(user_input, max_length=100, num_return_sequences=1)
    generated_text = response[0]['generated_text']
    # Bug fix: HF text-generation pipelines return the prompt followed by the
    # continuation. Returning it verbatim echoed the user's input back as part
    # of the "response" — strip the prompt prefix before returning.
    if generated_text.startswith(user_input):
        generated_text = generated_text[len(user_input):]
    return generated_text.strip()
 
 
13
 
14
# Wire up the Gradio UI: a two-line prompt textbox feeding generate_response,
# with the model's reply rendered as plain text.
interface = gr.Interface(
    fn=generate_response,
    inputs=gr.Textbox(label="prompt:", lines=2, placeholder="prompt"),
    outputs="text",
    title="Gemma",
    description="Prompt gemma-2b",
)

# Start the web server for the app.
interface.launch()