Aditya0619 committed on
Commit 1c15c4f · verified · 1 Parent(s): 3f6e910

Update app.py

Files changed (1)
  1. app.py +28 -38
app.py CHANGED
@@ -1,79 +1,69 @@
 import gradio as gr
-from transformers import pipeline, Conversation
+from transformers import pipeline

-# Initialize the model pipeline (Hugging Face conversational model)
+# Initialize the conversational model pipeline
 chatbot_pipeline = pipeline("text-generation", model="Aditya0619/Medbot")

-# Define the bot's response function
+# Function to manage history and generate responses
 def respond(message, history, system_message, max_tokens, temperature, top_p):
-    # If no conversation history, initialize an empty one
+    # Initialize history if it's None
     if history is None:
         history = []

-    # Build the conversation object with past messages
-    conversation = Conversation()
+    # Build input by concatenating past messages (user-bot pairs)
+    chat_input = ""
     for user_input, bot_response in history:
-        conversation.add_user_input(user_input)
-        conversation.append_response(bot_response)
+        chat_input += f"User: {user_input}\nBot: {bot_response}\n"
+    chat_input += f"User: {message}\nBot:"

-    # Add the latest user message
-    conversation.add_user_input(message)
-
-    # Generate a response using the chatbot pipeline
-    result = chatbot_pipeline(
-        conversation,
+    # Generate a response using the pipeline
+    response = chatbot_pipeline(
+        chat_input,
         max_length=max_tokens,
         temperature=temperature,
         top_p=top_p,
-        pad_token_id=50256 # Avoid padding errors for some models like GPT-2 variants
-    )
+        pad_token_id=50256 # Avoids padding errors with models like GPT-2
+    )[0]["generated_text"].split("Bot:")[-1].strip()

-    # Get the latest response from the model
-    bot_response = result.generated_responses[-1]
+    # Update history with the new interaction
+    history.append((message, response))

-    # Update the history with the new exchange
-    history.append((message, bot_response))
+    # Return the updated chat history
     return history, history

-# Define the UI components and layout
+# Define the Gradio app layout
 with gr.Blocks() as demo:
-    # Title and description
-    gr.Markdown("# 🤖 AI Chatbot with Memory\nThis chatbot remembers your previous messages.")
+    gr.Markdown("# 🤖 AI Chatbot with Memory\nChat with the bot and it will remember your conversation!")

-    # Input fields for system message and settings
+    # Input fields for settings
     system_message = gr.Textbox(
         label="System Message (Optional)",
         placeholder="e.g., You are a helpful assistant."
     )
-    max_tokens = gr.Slider(
-        label="Max Tokens", minimum=50, maximum=500, value=250, step=10
-    )
-    temperature = gr.Slider(
-        label="Temperature", minimum=0.0, maximum=1.0, value=0.7, step=0.1
-    )
-    top_p = gr.Slider(
-        label="Top P", minimum=0.0, maximum=1.0, value=0.9, step=0.1
-    )
+    max_tokens = gr.Slider(label="Max Tokens", minimum=50, maximum=500, value=250, step=10)
+    temperature = gr.Slider(label="Temperature", minimum=0.0, maximum=1.0, value=0.7, step=0.1)
+    top_p = gr.Slider(label="Top P", minimum=0.0, maximum=1.0, value=0.9, step=0.1)

-    # Chatbot interface
+    # Chatbot interface and user input
     chatbot = gr.Chatbot(label="Chat with AI")
-    user_input = gr.Textbox(label="Your Message", placeholder="Type a message...")
+    user_input = gr.Textbox(label="Your Message", placeholder="Type a message...", lines=2)

     # Hidden state to store conversation history
     state = gr.State([])

-    # Submit button to trigger the response
+    # Submit button to send messages
     submit = gr.Button("Send")

-    # Link the input and chatbot response function
+    # Link the user input and chatbot response function
     submit.click(
         respond,
         inputs=[user_input, state, system_message, max_tokens, temperature, top_p],
         outputs=[chatbot, state]
     )

-    # Display an initial greeting message
+    # Initial greeting message
     demo.load(lambda: [("Hi! How can I assist you today?", "")], outputs=chatbot)

 # Launch the Gradio app
 demo.launch()
+
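
For reference, a minimal standalone sketch of the prompt-concatenation pattern that the updated respond() uses in place of the removed transformers Conversation API. The model name and decoding values are copied from the diff above; the example history and message are illustrative only, and running the snippet will download the model weights.

# Sketch only: reproduces the "User:/Bot:" prompt building and reply extraction from app.py.
from transformers import pipeline

generator = pipeline("text-generation", model="Aditya0619/Medbot")

# (user, bot) pairs as stored in the gr.State component; contents here are made up.
history = [("Hello", "Hi! How can I assist you today?")]
message = "What should I do about a mild fever?"

# Flatten prior turns into one transcript and append the new user turn.
prompt = "".join(f"User: {u}\nBot: {b}\n" for u, b in history)
prompt += f"User: {message}\nBot:"

# The pipeline returns the prompt plus its continuation; everything after the
# last "Bot:" marker is taken as the reply, mirroring split("Bot:")[-1] in app.py.
result = generator(prompt, max_length=250, temperature=0.7, top_p=0.9, pad_token_id=50256)
reply = result[0]["generated_text"].split("Bot:")[-1].strip()
print(reply)

Note that temperature and top_p only influence generation when sampling is enabled in the model's generation config; under the default greedy decoding they are ignored, and the same applies to the call inside respond().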