import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
import gradio as gr
# Set the random seed for reproducibility
torch.random.manual_seed(0)
# Load the model and tokenizer from Hugging Face
model = AutoModelForCausalLM.from_pretrained(
    "microsoft/Phi-3.5-mini-instruct",
    device_map="cpu",
    # device_map="cuda",
    torch_dtype="auto",
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3.5-mini-instruct")
# Create a text-generation pipeline
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
)
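# Note: recent transformers releases allow the text-generation pipeline to accept a list of
# chat-style message dicts and apply the model's chat template automatically;
# generate_response() below relies on this behavior.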
# Define the pipeline arguments
generation_args = {
    "max_new_tokens": 500,
    "return_full_text": False,
    "temperature": 0.0,  # Ignored when do_sample=False; decoding is greedy
    "do_sample": False,
}
chat_session = {}  # In-memory store mapping session IDs to chat histories (lost on restart)
# Function to generate responses based on the entire chat history
def generate_response(chat_history):
    messages = [{"role": "system", "content": "You are a helpful AI assistant."}]
    # Append the completed turns (the last entry holds the new user message with an empty reply)
    for user_message, assistant_message in chat_history[:-1]:
        messages.append({"role": "user", "content": user_message})
        messages.append({"role": "assistant", "content": assistant_message})
    # Generate a response for the latest user message
    user_input = chat_history[-1][0]  # The latest user message
    messages.append({"role": "user", "content": user_input})
    response = pipe(messages, **generation_args)
    # Return the assistant's generated text
    assistant_response = response[0]["generated_text"]
    return assistant_response
# Function to update chat
def chat(user_message, history, session):
    if session == "":
        return history, "Error: Session ID cannot be empty. Please start a new chat."
    history = history or []  # Initialize history if empty
    # Generate assistant response based on the history
    assistant_message = generate_response(history + [(user_message, "")])
    # Append user and assistant messages to history
    history.append([user_message, assistant_message])
    chat_session[session] = history
    print("USER : ", user_message)
    print("ASSISTANT : ", assistant_message)
    return history, ""
# Function to list the available session IDs
def get_session_list():
    return list(chat_session.keys())
# Function to create new chat and return updated session list
def new_chat():
    session = f'session:{len(chat_session) + 1}'
    chat_session[session] = []  # Initialize empty chat history for the new session
    # gr.update(...) refreshes the dropdown's choices (not just its value) with the new session
    return [], "", session, gr.update(choices=get_session_list(), value=session)
# Function to fetch old chat session history
def old_chat(sessions):
    return chat_session.get(sessions, [])
# Function to reset chat history
def reset_button():
    global chat_session  # Access the global chat_session
    chat_session = {}  # Reset the global chat_session
    # Clear the chat, session, input field, session dropdown, and load box
    return [], "", "", gr.update(choices=[], value=None), ""
with gr.Blocks(css=".small-btn {width: 100px !important;} .large-textbox {width: 100% !important;}") as demo:
    gr.Markdown("# 🤖 AI Assistant")
    # Add instructions to the UI
    gr.Markdown("""
## Steps to Use the AI Assistant:
1. **Start a New Chat**: Click the **'Start New Chat'** button to create a new session.
2. **Send a Message**: Type your message in the input box and either press **Enter** or click **'Send'** to get a response from the AI.
3. **View Available Sessions**: Click **'Get Available Session'** to list all your chat sessions.
4. **Load an Old Session**: You can enter the previous session ID in the **'Load Session'** box and click **'Load Session'**.
5. **Reset All Chats**: Click the **'Reset All'** button to clear all chat sessions and start fresh.

**Session ID**: Every new chat has a unique session ID, which you can use to return to previous conversations.
""")
    with gr.Column():
        new_chat_button = gr.Button("Start New Chat")
    with gr.Row():
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(elem_id="chatbot")
            with gr.Row():
                with gr.Column(scale=5):
                    user_input = gr.Textbox(
                        show_label=False,
                        placeholder="Type your message here...",
                        container=False,
                        elem_classes="large-textbox"
                    )
                with gr.Column(scale=1):
                    send_button = gr.Button("Send", variant="primary", elem_classes="small-btn")
        with gr.Column(scale=1):
            session = gr.Textbox(label="Current Session", interactive=False)
            session_list = gr.Dropdown(label="Available Sessions", choices=get_session_list(), allow_custom_value=True)
            load_session = gr.Textbox(label="Load Session", interactive=True)
            with gr.Row():
                get_old_session_button = gr.Button("Load Session")
                avail_session = gr.Button("Get Available Session")
            reset_button_ = gr.Button("Reset All", variant="secondary")
    # Button click actions
    user_input.submit(chat, [user_input, chatbot, session], [chatbot, user_input])
    send_button.click(chat, [user_input, chatbot, session], [chatbot, user_input])  # Send button
    new_chat_button.click(new_chat, [], [chatbot, user_input, session, session_list])  # Also update the session list
    get_old_session_button.click(old_chat, [load_session], [chatbot])
    reset_button_.click(reset_button, [], [chatbot, session, user_input, session_list, load_session])
    # Wrap get_session_list so the dropdown's choices (not just its value) are refreshed
    avail_session.click(lambda: gr.update(choices=get_session_list()), [], [session_list])
# Launch the Gradio app
demo.launch()
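# When running locally (outside Hugging Face Spaces), demo.launch(share=True) can be used to
# obtain a temporary public link.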