# NOTE: the original scrape carried Hugging Face Space page residue here
# ("Spaces:" / "Runtime error" x2) which is not valid Python; replaced with
# this comment so the file parses.
# Learning project: a PDF question-answering chatbot over churn-analysis
# documents, served through a Gradio chat UI.
# (Every original line ended in a " | |" scrape artifact that made the file a
# SyntaxError; those are removed here, and dead commented-out OpenAI /
# download_loader code is dropped.)
import os

import gradio as gr
import torch
from transformers import AutoModelForCausalLM
from llama_index.readers.file import PDFReader
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, SummaryIndex

# Prefer the GPU when one is available; the local model below is moved there.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Local causal LM, added to replace the OpenAI API.
# NOTE(review): `model` is loaded but never passed to the query engine below —
# VectorStoreIndex.as_query_engine() falls back to llama_index's default LLM
# (OpenAI) unless one is configured, which would fail without an API key.
# Confirm whether this model was meant to be wired in via Settings.llm.
model_name = "meta-llama/Llama-3.2-3B"
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)

# Document locations come from environment variables configured in the Space.
# NOTE(review): if a variable is unset these are None and load_data() will
# fail; confirm the Space secrets are set. Data_Read is currently unused
# (leftover from the old download_loader path).
Data_Read = os.environ.get('Data_Reader')
ChurnData = os.environ.get('Churn_Data')
ChurnData2 = os.environ.get('Churn_Data2')

# Read both PDFs and merge their pages into a single document list.
loader = PDFReader()
documents = loader.load_data(file=ChurnData)
documents2 = loader.load_data(file=ChurnData2)
documents = documents + documents2

# Build an in-memory vector index over the combined documents and expose a
# stateless query engine for the chat callback.
index = VectorStoreIndex.from_documents(documents)
query_engine = index.as_query_engine()
def reply(message, history):
    """Chat callback: answer `message` via the RAG query engine.

    Args:
        message: The user's latest chat message (a question about the PDFs).
        history: Prior chat turns supplied by gr.ChatInterface; unused because
            each question is answered statelessly against the index.

    Returns:
        The query engine's response, rendered as a plain string for display.
    """
    return str(query_engine.query(message))
# Build and launch the chat UI (blocking call; runs the web server).
# NOTE(review): retry_btn/undo_btn/clear_btn are Gradio 4.x kwargs that were
# removed in Gradio 5 — if the Space shows "Runtime error" at startup, pin
# gradio<5 or drop these three kwargs. Confirm the installed Gradio version.
Conversing = gr.ChatInterface(
    reply,
    chatbot=gr.Chatbot(height="70vh", label="Conversation"),
    retry_btn=None,
    theme=gr.themes.Monochrome(),
    title='E-Commerce BT/AN/CA/DH/VE/GA CMS Q&A',
    undo_btn=None,
    clear_btn=None,
    css='footer {visibility: hidden}',
).launch()