from threading import Thread

import gradio as gr
from langchain.prompts import HumanMessagePromptTemplate, ChatPromptTemplate
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
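
# Load the fine-tuned Llama-2 hotel-booking model and its tokenizer from the Hugging Face Hub.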
model_name = "KvrParaskevi/Llama-2-7b-Hotel-Booking-Model"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
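

# Chat handler: formats the incoming message into a prompt and streams the model's reply.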
def chatbot(message, history):
    # Wrap the incoming user message in a LangChain chat prompt and render it to a string.
    prompt_template = HumanMessagePromptTemplate.from_template("{message}")
    chat_prompt_template = ChatPromptTemplate.from_messages([prompt_template])
    prompt = chat_prompt_template.format(message=message)

    # TextIteratorStreamer wraps the tokenizer; generation runs in a background thread
    # so the streamed tokens can be consumed as they are produced.
    inputs = tokenizer(prompt, return_tensors="pt")
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=256)
    Thread(target=model.generate, kwargs=generation_kwargs).start()

    # Join the streamed chunks into the final response string.
    response = "".join(streamer)
    return response
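
# Gradio UI: a Chatbot display, a message Textbox, and a Clear button, wired up below.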
with gr.Blocks(title="Langchain Chatbot") as demo:
    gr.Markdown("A simple chatbot using Langchain and Hugging Face")
    chatbot_interface = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def respond(message, chat_history):
        # History is the classic list of (user, bot) message pairs used by gr.Chatbot.
        bot_message = chatbot(message, chat_history)
        chat_history.append((message, bot_message))
        return "", chat_history

    # Submitting the textbox sends a message; the Clear button empties the chat window.
    msg.submit(respond, [msg, chatbot_interface], [msg, chatbot_interface])
    clear.click(lambda: None, None, chatbot_interface, queue=False)
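
# Start the app; pass share=True to launch() if a public link is needed.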
demo.launch()