### 010125-daysoff-assistant-api
import os
import time
import json
import torch
from api_docs_mck import daysoff_api_docs
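# `daysoff_api_docs` (from api_docs_mck.py) is assumed to be a plain-text
# description of Daysoff's booking API; it is injected into the prompts below
# and passed to APIChain as its api_docs.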
import chainlit as cl
from langchain import hub
from langchain.chains import LLMChain, APIChain
from langchain_core.prompts import PromptTemplate
from langchain_community.llms import HuggingFaceHub
from langchain.memory.buffer import ConversationBufferMemory
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
LANGCHAIN_API_KEY = os.environ.get("LANGCHAIN_API_KEY")
HF_TOKEN = os.environ.get("HF_TOKEN")
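# Tokens are read from the environment (e.g. the Space's secrets); only
# HUGGINGFACEHUB_API_TOKEN is used directly below.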
#os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:true"
dtype = torch.float16          # unused: kept from a local-inference setup
device = torch.device("cuda")  # unused: HuggingFaceHub runs inference remotely
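# --prompt templates for the general assistant and the API chain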
daysoff_assistant_booking_template = """
You are a customer support assistant for Daysoff.no. Your expertise is
retrieving booking information for a given booking ID ('bestillingsnummer').
Chat History: {chat_history}
Question: {question}
Answer:
"""
daysoff_assistant_booking_prompt = PromptTemplate(
    input_variables=["chat_history", "question"],
    template=daysoff_assistant_booking_template,
)
api_url_template = """
Given the following API Documentation for Daysoff's official
booking information API: {api_docs_mck}
Your task is to construct the most efficient API URL to answer
the user's question, ensuring the
call is optimized to include only the necessary information.
Question: {question}
API URL:
"""
api_url_prompt = PromptTemplate(
    input_variables=["api_docs", "question"],
    template=api_url_template,
)
api_response_template = """"
With the API Documentation for Daysoff's official API: {api_docs_mck}
and the specific user question: {question} in mind,
and given this API URL: {api_url} for querying, here is the
response from Daysoff's API: {api_response}.
Please provide a summary that directly addresses the user's question,
omitting technical details like response format, and
focusing on delivering the answer with clarity and conciseness,
as if a human customer service agent is providing this information.
Adapt to user's language. By default, you speak Norwegian.
Summary:
"""
api_response_prompt = PromptTemplate(
    input_variables=["api_docs", "question", "api_url", "api_response"],
    template=api_response_template,
)
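# NOTE: APIChain expects exactly these input variables: the URL prompt takes
# ["api_docs", "question"] and the response prompt takes
# ["api_docs", "question", "api_url", "api_response"].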
# --model, memory object, and llm_chain
@cl.on_chat_start
def setup_multiple_chains():
    # Hosted inference via the Hugging Face Hub; generation settings go in
    # model_kwargs and the token is passed as huggingfacehub_api_token.
    llm = HuggingFaceHub(
        repo_id="google/gemma-2-2b-it",
        huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
        model_kwargs={"temperature": 0.7},
    )

    # Conversation memory used by the general-purpose chain
    conversation_memory = ConversationBufferMemory(
        memory_key="chat_history",
        max_len=200,
        return_messages=True,
    )

    llm_chain = LLMChain(
        llm=llm,
        prompt=daysoff_assistant_booking_prompt,
        memory=conversation_memory,
    )
    cl.user_session.set("llm_chain", llm_chain)

    api_chain = APIChain.from_llm_and_api_docs(
        llm=llm,
        api_docs=daysoff_api_docs,
        api_url_prompt=api_url_prompt,
        api_response_prompt=api_response_prompt,
        verbose=True,
        limit_to_domains=None,
    )
    cl.user_session.set("api_chain", api_chain)
# --wrapper function around the @cl.on_message decorator; chain trigger(s)
@cl.on_message
async def handle_message(message: cl.Message):
    user_message = message.content.lower()
    llm_chain = cl.user_session.get("llm_chain")
    api_chain = cl.user_session.get("api_chain")

    # + "bestillingsnummer", "checkin", "checkout" for api_docs
    booking_keywords = ["booking_id", "full_name", "amount",
                        "date", "address", "user_id"]

    if any(keyword in user_message for keyword in booking_keywords):
        # --if any keyword is in user_message, use api_chain
        response = await api_chain.acall(
            user_message,
            callbacks=[cl.AsyncLangchainCallbackHandler()],
        )
    else:
        # --default to llm_chain for general queries
        response = await llm_chain.acall(
            user_message,
            callbacks=[cl.AsyncLangchainCallbackHandler()],
        )

    response_key = "output" if "output" in response else "text"
    await cl.Message(response.get(response_key, "")).send()
if __name__ == '__main__':
    # Chainlit apps are normally started from the CLI: `chainlit run app.py -w`
    from chainlit.cli import run_chainlit
    run_chainlit(__file__)