c123ian committed on
Commit
6905ea3
·
verified ·
1 Parent(s): 41a2b80

deploy at 2024-08-16 14:03:27.820502

Browse files
Files changed (4) hide show
  1. Dockerfile +10 -0
  2. config.ini +5 -0
  3. main.py +68 -0
  4. requirements.txt +15 -0
Dockerfile ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.10
2
+ WORKDIR /code
3
+ COPY --link --chown=1000 . .
4
+ RUN mkdir -p /tmp/cache/
5
+ RUN chmod a+rwx -R /tmp/cache/
6
+ ENV HF_HUB_CACHE=HF_HOME
7
+ RUN pip install --no-cache-dir -r requirements.txt
8
+
9
+ ENV PYTHONUNBUFFERED=1 PORT=7860
10
+ CMD ["python", "main.py"]
config.ini ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ [DEFAULT]
2
+ dataset_id = space-backup
3
+ db_dir = data
4
+ private_backup = True
5
+
main.py ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fasthtml.common import *
2
+ from fasthtml_hf import setup_hf_backup # Importing setup_hf_backup
3
+ import openai
4
+ import uvicorn
5
+
6
+ # Set up the app, including daisyui and tailwind for the chat component
7
+ tlink = Script(src="https://cdn.tailwindcss.com")
8
+ dlink = Link(rel="stylesheet", href="https://cdn.jsdelivr.net/npm/[email protected]/dist/full.min.css")
9
+ app = FastHTML(hdrs=(tlink, dlink, picolink))
10
+
11
+ # Set up OpenAI API (https://openai.com/)
12
+ openai.api_key = os.getenv("OPENAI_API_KEY")
13
+ messages = []
14
+
15
+
16
+ # Chat message component (renders a chat bubble)
17
+ def ChatMessage(msg):
18
+ bubble_class = f"chat-bubble-{'primary' if msg['role'] == 'user' else 'secondary'}"
19
+ chat_class = f"chat-{'end' if msg['role'] == 'user' else 'start'}"
20
+ return Div(Div(msg['role'], cls="chat-header"),
21
+ Div(msg['content'], cls=f"chat-bubble {bubble_class}"),
22
+ cls=f"chat {chat_class}")
23
+
24
+ # The input field for the user message. Also used to clear the
25
+ # input field after sending a message via an OOB swap
26
+ def ChatInput():
27
+ return Input(type="text", name='msg', id='msg-input',
28
+ placeholder="Type a message",
29
+ cls="input input-bordered w-full", hx_swap_oob='true')
30
+
31
+ # The main screen
32
+ @app.route("/")
33
+ def get():
34
+ page = Body(H1('Chatbot Demo'),
35
+ Div(*[ChatMessage(msg) for msg in messages],
36
+ id="chatlist", cls="chat-box h-[73vh] overflow-y-auto"),
37
+ Form(Group(ChatInput(), Button("Send", cls="btn btn-primary")),
38
+ hx_post="/", hx_target="#chatlist", hx_swap="beforeend",
39
+ cls="flex space-x-2 mt-2",
40
+ ), cls="p-4 max-w-lg mx-auto")
41
+ return Title('Chatbot Demo'), page
42
+
43
+ # Handle the form submission
44
+ @app.post("/")
45
+ def post(msg:str):
46
+ messages.append({"role":"user", "content":msg})
47
+
48
+ # Get response from OpenAI API
49
+ response = openai.chat.completions.create(
50
+ model="gpt-3.5-turbo",
51
+ messages=[
52
+ {"role": "system", "content": "You are a helpful and concise assistant."},
53
+ *messages
54
+ ]
55
+ )
56
+
57
+ assistant_msg = response.choices[0].message.content.strip()
58
+ messages.append({"role":"assistant", "content":assistant_msg})
59
+
60
+ return (ChatMessage(messages[-2]), # The user's message
61
+ ChatMessage(messages[-1]), # The chatbot's response
62
+ ChatInput()) # And clear the input field via an OOB swap
63
+
64
+
65
+ #serve()
66
+ if __name__ == "__main__":
67
+ setup_hf_backup(app)
68
+ uvicorn.run(app, host="0.0.0.0", port=7860)
requirements.txt ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ fasthtml-hf>=0.1.1
2
+ python-fasthtml>=0.0.8
3
+ huggingface-hub>=0.20.0
4
+ uvicorn>=0.29
5
+ pandas
6
+ numpy
7
+ lancedb
8
+ srsly
9
+ cohere
10
+ python-dotenv
11
+ tantivy
12
+ beautifulsoup4
13
+ retry
14
+ transformers
15
+ torch