Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -8,6 +8,7 @@ from transformers import (
|
|
8 |
AutoModelForSeq2SeqLM,
|
9 |
AutoModelForSequenceClassification
|
10 |
)
|
|
|
11 |
|
12 |
chat_history = []
|
13 |
nltk.download('punkt_tab')
|
@@ -68,6 +69,23 @@ def evaluate(passage, reference):
|
|
68 |
evl = Evaluvate()
|
69 |
return evl.get_result(passage, reference)
|
70 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
71 |
def chat_function(user_input, history):
|
72 |
"""Handle individual user chat, with integration to Hugging Face."""
|
73 |
if history is None:
|
|
|
8 |
AutoModelForSeq2SeqLM,
|
9 |
AutoModelForSequenceClassification
|
10 |
)
|
11 |
+
from huggingface_hub import InferenceClient
|
12 |
|
13 |
chat_history = []
|
14 |
nltk.download('punkt_tab')
|
|
|
69 |
evl = Evaluvate()
|
70 |
return evl.get_result(passage, reference)
|
71 |
|
72 |
+
def chat_with_huggingface(api_key, chat_message, history):
    """Send a chat message (plus prior history) to the Hugging Face Inference API.

    Parameters
    ----------
    api_key : str
        Hugging Face API token used to authenticate the ``InferenceClient``.
    chat_message : str
        The new user message to send.
    history : list[dict] | None
        Prior conversation turns. Only dicts carrying both ``"role"`` and
        ``"content"`` keys are forwarded to the model; anything else is
        silently skipped (same filtering as before).

    Returns
    -------
    The assistant message object from ``completion.choices[0].message`` on
    success, or a dict of the form ``{"error": str}`` if the API call fails.
    """
    client = InferenceClient(api_key=api_key)

    # BUG FIX: the original seeded `messages` with the NEW user message and
    # appended the history AFTER it, so the model saw the conversation out of
    # chronological order. Replay history first, then the new message.
    # Also tolerate history=None (callers may pass it on the first turn).
    messages = [
        turn
        for turn in (history or [])
        if "role" in turn and "content" in turn
    ]
    messages.append({"role": "user", "content": chat_message})

    try:
        completion = client.chat.completions.create(
            model="Qwen/QwQ-32B-Preview",
            messages=messages,
            max_tokens=500,
        )
        return completion.choices[0].message
    except Exception as e:
        # Surface the failure to the caller as data rather than raising,
        # matching the original best-effort contract (UI renders the error).
        return {"error": str(e)}
|
88 |
+
|
89 |
def chat_function(user_input, history):
|
90 |
"""Handle individual user chat, with integration to Hugging Face."""
|
91 |
if history is None:
|