### 010125-daysoff-assistant-api
import os
import time
import json
import torch
from api_docs_mck import daysoff_api_docs
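# daysoff_api_docs is expected to be a plain-text description of Daysoff's
# booking API endpoints; it is handed to APIChain below as its api_docs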
import chainlit as cl
from langchain import hub
from langchain.chains import LLMChain, APIChain
from langchain_core.prompts import PromptTemplate
from langchain_community.llms import HuggingFaceHub
from langchain.memory.buffer import ConversationBufferMemory
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
LANGCHAIN_API_KEY = os.environ.get("LANGCHAIN_API_KEY")
HF_TOKEN = os.environ.get("HF_TOKEN")
#os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:true"
# torch dtype/device are set up here but are not used by the hosted
# HuggingFaceHub inference client configured below
dtype = torch.float16
device = torch.device("cuda")
daysoff_assistant_booking_template = """
You are a customer support assistant for Daysoff.no. Your expertise is
retrieving booking information for a given booking ID ("bestillingsnummer").
Chat History: {chat_history}
Question: {question}
Answer:
"""
daysoff_assistant_booking_prompt = PromptTemplate(
    input_variables=["chat_history", "question"],
    template=daysoff_assistant_booking_template,
)
api_url_template = """
Given the following API Documentation for Daysoff's official
booking information API: {api_docs}
Your task is to construct the most efficient API URL to answer
the user's question, ensuring the call is optimized to include
only the necessary information.
Question: {question}
API URL:
"""
# APIChain passes the documentation under the variable name "api_docs",
# so the prompt variables must use that name
api_url_prompt = PromptTemplate(
    input_variables=["api_docs", "question"],
    template=api_url_template,
)
api_response_template = """
With the API Documentation for Daysoff's official API: {api_docs}
and the specific user question: {question} in mind,
and given this API URL: {api_url} for querying, here is the
response from Daysoff's API: {api_response}.
Please provide a summary that directly addresses the user's question,
omitting technical details like response format, and
focusing on delivering the answer with clarity and conciseness,
as if a human customer service agent is providing this information.
Adapt to the user's language. By default, you speak Norwegian.
Summary:
"""
api_response_prompt = PromptTemplate(
    input_variables=["api_docs", "question", "api_url", "api_response"],
    template=api_response_template,
)
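# note: {question} comes from the chain input, while {api_url} and
# {api_response} are filled in by APIChain itself at runtime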
# --model, memory object, and chains (llm_chain + api_chain)
@cl.on_chat_start
def setup_multiple_chains():
    # HuggingFaceHub expects huggingfacehub_api_token and takes sampling
    # parameters via model_kwargs; it has no temperature or device keyword
    llm = HuggingFaceHub(
        repo_id="google/gemma-2-2b-it",
        huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
        model_kwargs={"temperature": 0.7},
    )
    # buffer memory keeps the full chat history (it has no max_len parameter)
    conversation_memory = ConversationBufferMemory(
        memory_key="chat_history",
        return_messages=True,
    )
    llm_chain = LLMChain(
        llm=llm,
        prompt=daysoff_assistant_booking_prompt,
        memory=conversation_memory,
    )
    cl.user_session.set("llm_chain", llm_chain)

    api_chain = APIChain.from_llm_and_api_docs(
        llm=llm,
        api_docs=daysoff_api_docs,
        api_url_prompt=api_url_prompt,
        api_response_prompt=api_response_prompt,
        verbose=True,
        limit_to_domains=None,
    )
    cl.user_session.set("api_chain", api_chain)
# --wrapper around the @cl.on_message decorator; decides which chain to trigger
@cl.on_message
async def handle_message(message: cl.Message):
    user_message = message.content.lower()
    llm_chain = cl.user_session.get("llm_chain")
    api_chain = cl.user_session.get("api_chain")

    # + "bestillingsnummer", "checkin", "checkout" could be added for api_docs
    booking_keywords = ["booking_id", "full_name", "amount",
                        "date", "address", "user_id"]
    if any(keyword in user_message for keyword in booking_keywords):
        # --booking-related keywords route to api_chain
        response = await api_chain.acall(user_message,
                                         callbacks=[cl.AsyncLangchainCallbackHandler()])
    else:
        # --defaults to llm_chain for general queries
        response = await llm_chain.acall(user_message,
                                         callbacks=[cl.AsyncLangchainCallbackHandler()])

    response_key = "output" if "output" in response else "text"
    await cl.Message(content=response.get(response_key, "")).send()
# Chainlit has no cl.launch(); start the app from the CLI instead, e.g.:
#   chainlit run <this_file>.py -w