update app
app.py CHANGED
@@ -1,54 +1,24 @@
 import gradio as gr
 from mistralai.client import MistralClient, ChatMessage
-import faiss
 import os
-import numpy as np
 from dotenv import load_dotenv
 
 # Load environment variables
 load_dotenv()
 api_key = os.getenv('API_KEY')
 
-# Initialize Mistral client
+# Initialize Mistral client with the API key
 client = MistralClient(api_key=api_key)
 
-# Assuming your embeddings and FAISS index are preloaded or initialized elsewhere
-# For demonstration, these steps are not included here
-# Please replace `index` and `chunks` with your actual data structures
-index = None  # Your FAISS index
-chunks = []  # Your preprocessed text chunks
-
-def get_text_embedding(input_text):
-    """Retrieve text embeddings from Mistral."""
-    embeddings_batch_response = client.embeddings(
-        model="mistral-embed",
-        input=[input_text]
-    )
-    return embeddings_batch_response.data[0].embedding
-
 def answer_question(question):
-    """
-    #
-
-
-    #
-
-
-    # Retrieve and format the relevant chunks as context
-    retrieved_chunks = " ".join([chunks[i] for i in indices.flatten()])
-    prompt = f"""
-    Context information is below.
-    ---------------------
-    {retrieved_chunks}
-    ---------------------
-    Given the context information and not prior knowledge, answer the query.
-    Query: {question}
-    Answer:
-    """
+    """Directly ask Mistral the question and return the answer."""
+    # Format the user's question for Mistral
+    user_message = question
+
+    # Use the run_mistral function to get an answer
+    answer = run_mistral(user_message)
 
-
-    response = run_mistral(prompt)
-    return response
+    return answer
 
 def run_mistral(user_message, model="mistral-medium"):
     """Interact with Mistral using chat."""
@@ -57,10 +27,10 @@ def run_mistral(user_message, model="mistral-medium"):
     return chat_response.choices[0].message.content
 
 app = gr.Interface(fn=answer_question,
-                   inputs=gr.inputs.Textbox(lines=2, placeholder="Ask a question
+                   inputs=gr.inputs.Textbox(lines=2, placeholder="Ask a question..."),
                    outputs="text",
-                   title="
-                   description="Ask any question
+                   title="Your Assistant",
+                   description="Ask any question, and I'll try to provide an informative answer.")
 
 if __name__ == "__main__":
-    app.launch()
+    app.launch(share=True)  # Set `share=True` to create a public link
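For reference: several removed lines inside the old answer_question were lost to extraction (the bare docstring and empty comment lines above), and they presumably computed the indices that retrieved_chunks consumes. A minimal sketch of that retrieval step, assuming a FAISS index built over embeddings of chunks and reusing the removed get_text_embedding helper; retrieve_context is a hypothetical name, not part of the original file:

import numpy as np

def retrieve_context(question, index, chunks, k=2):
    # Embed the question with the same model used to index the chunks
    question_embedding = np.array([get_text_embedding(question)], dtype="float32")
    # FAISS search returns (distances, indices) for the k nearest neighbours
    distances, indices = index.search(question_embedding, k)
    # Join the retrieved chunks into a single context string, as the old code did
    return " ".join(chunks[i] for i in indices.flatten())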
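The hunks also elide the body of run_mistral, skipping from its docstring to the return statement. With the legacy mistralai 0.x client this file imports, the body would typically look like the sketch below; note that on some versions of that library ChatMessage is imported from mistralai.models.chat_completion rather than mistralai.client:

def run_mistral(user_message, model="mistral-medium"):
    """Interact with Mistral using chat."""
    # Wrap the user's text in a single-turn chat message
    messages = [ChatMessage(role="user", content=user_message)]
    # Legacy MistralClient.chat() call; the response exposes .choices like OpenAI's API
    chat_response = client.chat(model=model, messages=messages)
    return chat_response.choices[0].message.content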
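One caveat on the Gradio wiring: gr.inputs.Textbox is the legacy Gradio 2.x namespace and was removed in Gradio 4, so on a current Spaces runtime the interface would be declared with gr.Textbox instead, for example:

app = gr.Interface(
    fn=answer_question,
    # gr.Textbox replaces the removed gr.inputs.Textbox namespace
    inputs=gr.Textbox(lines=2, placeholder="Ask a question..."),
    outputs="text",
    title="Your Assistant",
    description="Ask any question, and I'll try to provide an informative answer.",
)

share=True is also redundant when the app runs on Spaces, which already serves it at a public URL; recent Gradio versions warn that the flag is unsupported there.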