Kvikontent committed
Commit e0d9793 · verified · 1 Parent(s): 861cbf8

Create app.py

Files changed (1)
  1. app.py +53 -0
app.py ADDED
@@ -0,0 +1,53 @@
import os

import streamlit as st
from huggingface_hub import InferenceClient

# Set up the model and client (the Hugging Face token is read from the API_KEY env var)
model_name = "01-ai/Yi-1.5-34B-Chat"
client = InferenceClient(model_name, token=os.getenv("API_KEY"))

# Keep the chat history in session state so it survives Streamlit reruns
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# Streamlit app layout
st.title("Chat with Yi-1.5-34B")
st.write("Type 'quit' to exit the chat.")

# Chat area
chat_container = st.empty()

# Function to handle sending messages (defined before the button that registers it)
def send_message():
    message = st.session_state.input
    if not message:
        return

    st.session_state.chat_history.append({"role": "user", "content": message})

    # Clear the input field (permitted here because this runs as a widget callback)
    st.session_state.input = ""

    # Generate a streamed response
    stream = client.chat_completion(
        messages=st.session_state.chat_history,
        max_tokens=500,
        stream=True,
    )

    # Collect the streamed chunks into a single reply
    reply = ""
    for chunk in stream:
        delta = chunk.choices[0].delta.content
        if delta:
            reply += delta

    # Add the response to the chat history
    st.session_state.chat_history.append({"role": "assistant", "content": reply})

# User input area
user_input = st.text_input("Enter your message:", key="input")

# Send message button
send_button = st.button("Send", on_click=send_message)

# Display chat history
with chat_container.container():
    for message in st.session_state.chat_history:
        if message["role"] == "user":
            st.write(f"**You:** {message['content']}")
        else:
            st.write(f"**Yi:** {message['content']}")
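
For context: app.py reads the Hugging Face token from the API_KEY environment variable and is launched with streamlit run app.py. A minimal sketch of the same streaming chat_completion call outside Streamlit (assuming a valid token and that the model is reachable through the Inference API) might look like:

    import os

    from huggingface_hub import InferenceClient

    # Same model and token source as app.py
    client = InferenceClient("01-ai/Yi-1.5-34B-Chat", token=os.getenv("API_KEY"))

    # Stream a single reply and print the chunks as they arrive
    for chunk in client.chat_completion(
        messages=[{"role": "user", "content": "Hello!"}],
        max_tokens=100,
        stream=True,
    ):
        delta = chunk.choices[0].delta.content
        if delta:
            print(delta, end="", flush=True)

Accumulating (or printing) the delta chunks as they arrive is the same pattern send_message uses to build the assistant reply.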