from threading import Thread

import gradio as gr
from langchain.prompts import HumanMessagePromptTemplate, ChatPromptTemplate
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

# Load the Hugging Face model and tokenizer
model_name = "KvrParaskevi/Llama-2-7b-Hotel-Booking-Model"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
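# Note: this is a 7B-parameter checkpoint, so loading it needs several GB of
# memory; from_pretrained also accepts options such as device_map="auto"
# (with accelerate installed) if the model should be placed on a GPU.
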
# Define the Langchain chatbot function
def chatbot(message, history):
    # Build a Langchain chat prompt template around the user's message
    prompt_template = HumanMessagePromptTemplate.from_template("{user_input}")
    chat_prompt_template = ChatPromptTemplate.from_messages([prompt_template])
    prompt = chat_prompt_template.format(user_input=message)

    # Use a Hugging Face TextIteratorStreamer to stream the generated response
    inputs = tokenizer(prompt, return_tensors="pt")
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generation_kwargs = dict(**inputs, streamer=streamer, max_new_tokens=256)
    Thread(target=model.generate, kwargs=generation_kwargs).start()

    # Yield the partial response as new tokens arrive
    response = ""
    for token in streamer:
        response += token
        yield response
# Create a Gradio chatbot interface
with gr.Blocks() as demo:
    gr.Markdown("# Langchain Chatbot\nA simple chatbot using Langchain and Hugging Face")
    chatbot_interface = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type a message and press Enter")
    clear = gr.Button("Clear")

    # Wire the chatbot function to the UI: stream the reply into the chat window
    def respond(message, chat_history):
        chat_history = chat_history + [(message, "")]
        for partial in chatbot(message, chat_history):
            chat_history[-1] = (message, partial)
            yield "", chat_history

    msg.submit(respond, [msg, chatbot_interface], [msg, chatbot_interface])
    clear.click(lambda: None, None, chatbot_interface, queue=False)

# Launch the Gradio app (queue() enables streaming of generator outputs)
demo.queue()
demo.launch()