from flask import Flask, request, jsonify, send_from_directory
import requests
import os
from dotenv import load_dotenv

load_dotenv()

app = Flask(__name__)

API_URL = "https://api-inference.huggingface.co/models/"
headers = {"Authorization": f"Bearer {os.getenv('HUGGINGFACE_API_KEY')}"}
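
# The API key is read from the environment; with load_dotenv() above, a .env file
# next to this script is one way to provide it (placeholder value, not a real key):
#
#   HUGGINGFACE_API_KEY=hf_xxxxxxxxxxxxxxxx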

# Sample text for testing
sample_text = """
This is a sample text for testing our RAG chatbot.
It contains information about artificial intelligence and machine learning.
AI and ML are revolutionizing various industries and improving efficiency.
"""

def query(payload, model):
    """Call the Hugging Face Inference API for the given model and return the parsed JSON."""
    response = requests.post(API_URL + model, headers=headers, json=payload)
    return response.json()
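
# Sanity-check sketch for the helper above. The response shapes shown are
# assumptions inferred from how the routes below index into the results,
# not a guarantee of the Inference API:
#
#   emb = query({"inputs": "hello world"}, "sentence-transformers/all-MiniLM-L6-v2")
#   # assumed: a list whose first element is the embedding vector (floats)
#   gen = query({"inputs": "Question: what is AI? Answer:"}, "google/flan-t5-small")
#   # assumed: a list of dicts such as [{"generated_text": "..."}]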

@app.route('/')
def home():
    # Serve the single-page front end
    return send_from_directory('.', 'index.html')

@app.route('/ask', methods=['POST'])
def ask():
    prompt = request.json['question']

    # Use a sentence-transformers model to embed both the context and the question
    embedding_model = "sentence-transformers/all-MiniLM-L6-v2"
    context_embedding = query({"inputs": sample_text}, embedding_model)[0]
    query_embedding = query({"inputs": prompt}, embedding_model)[0]

    # Simple dot-product similarity between the two embeddings
    # (computed for illustration; this minimal demo does not use it to filter context)
    similarity = sum(a * b for a, b in zip(context_embedding, query_embedding))

    # Generate an answer with a T5 model, conditioning on the sample context
    generator_model = "google/flan-t5-small"
    input_text = f"Context: {sample_text}\n\nQuestion: {prompt}\n\nAnswer:"
    response = query({"inputs": input_text}, generator_model)[0]["generated_text"]

    return jsonify({'response': response})

if __name__ == '__main__':
    app.run(debug=True)
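
# Usage sketch (assumes the Flask development server on its default port 5000,
# and an index.html in the same directory for the home route):
#
#   import requests
#   r = requests.post("http://127.0.0.1:5000/ask",
#                     json={"question": "What does the sample text say about AI?"})
#   print(r.json()["response"])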