wwwillchen committed · Commit 75fb516 · 1 Parent(s): 89542ff
Completed - part 5
main.py CHANGED
@@ -75,13 +75,15 @@ ROOT_BOX_STYLE = me.Style(
   flex_direction="column",
 )
 
+STYLESHEETS = [
+  "https://fonts.googleapis.com/css2?family=Inter:[email protected]&display=swap"
+]
+
 @me.page(
   path="/",
-  stylesheets=[
-    "https://fonts.googleapis.com/css2?family=Inter:[email protected]&display=swap"
-  ],
+  stylesheets=STYLESHEETS,
 )
-def page():
+def home_page():
   model_picker_dialog()
   with me.box(style=ROOT_BOX_STYLE):
     header()
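Note: the hunks below also use State, Conversation, ChatMessage, Models, and the gemini/claude helper modules, which are defined elsewhere in main.py and are not part of this diff. The following is only a rough sketch of what the data classes plausibly look like, inferred from how their fields are used in this commit; treat every type and default here as an assumption.

from dataclasses import dataclass, field
from typing import Literal

import mesop as me


@dataclass
class ChatMessage:
  # Fields inferred from message.role / message.content / message.in_progress.
  role: Literal["user", "model"] = "user"
  content: str = ""
  in_progress: bool = False


@dataclass
class Conversation:
  # Fields inferred from conversation.model / conversation.messages.
  model: str = ""
  messages: list[ChatMessage] = field(default_factory=list)


@me.stateclass
class State:
  # Fields inferred from state.input, state.models, state.conversations,
  # and state.is_model_picker_dialog_open.
  input: str = ""
  models: list[str] = field(default_factory=list)
  conversations: list[Conversation] = field(default_factory=list)
  is_model_picker_dialog_open: bool = False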
@@ -95,36 +97,141 @@ def page():
     "Chat with multiple models at once",
     style=me.Style(font_size=20, margin=me.Margin(bottom=24)),
   )
+  # Uncomment this in the next step:
+  examples_row()
   chat_input()
-  display_conversations()
 
- [old line 101 removed; content not captured in this view]
+EXAMPLES = [
+  "Create a file-lock in Python",
+  "Write an email to Congress to have free milk for all",
+  "Make a nice box shadow in CSS",
+]
+
+def examples_row():
+  with me.box(
+    style=me.Style(
+      display="flex", flex_direction="row", gap=16, margin=me.Margin(bottom=24)
+    )
+  ):
+    for i in EXAMPLES:
+      example(i)
+
+def example(text: str):
+  with me.box(
+    key=text,
+    on_click=click_example,
+    style=me.Style(
+      cursor="pointer",
+      background="#b9e1ff",
+      width="215px",
+      height=160,
+      font_weight=500,
+      line_height="1.5",
+      padding=me.Padding.all(16),
+      border_radius=16,
+      border=me.Border.all(me.BorderSide(width=1, color="blue", style="none")),
+    ),
+  ):
+    me.text(text)
+
+def click_example(e: me.ClickEvent):
   state = me.state(State)
- [old lines 103-118 removed; content not captured in this view]
+  state.input = e.key
+
+@me.page(path="/conversation", stylesheets=STYLESHEETS)
+def conversation_page():
+  state = me.state(State)
+  model_picker_dialog()
+  with me.box(style=ROOT_BOX_STYLE):
+    header()
+
+    models = len(state.conversations)
+    models_px = models * 680
+    with me.box(
+      style=me.Style(
+        width=f"min({models_px}px, calc(100% - 32px))",
+        display="grid",
+        gap=16,
+        grid_template_columns=f"repeat({models}, 1fr)",
+        flex_grow=1,
+        overflow_y="hidden",
+        margin=me.Margin.symmetric(horizontal="auto"),
+        padding=me.Padding.symmetric(horizontal=16),
+      )
+    ):
+      for conversation in state.conversations:
+        model = conversation.model
+        messages = conversation.messages
+        with me.box(
+          style=me.Style(
+            overflow_y="auto",
+          )
+        ):
+          me.text("Model: " + model, style=me.Style(font_weight=500))
+
+          for message in messages:
+            if message.role == "user":
+              user_message(message.content)
+            else:
+              model_message(message)
+          if messages and model == state.conversations[-1].model:
+            me.box(
+              key="end_of_messages",
+              style=me.Style(
+                margin=me.Margin(
+                  bottom="50vh" if messages[-1].in_progress else 0
+                )
+              ),
+            )
+    with me.box(
+      style=me.Style(
+        display="flex",
+        justify_content="center",
+      )
+    ):
+      with me.box(
+        style=me.Style(
+          width="min(680px, 100%)",
+          padding=me.Padding(top=24, bottom=24),
+        )
+      ):
+        chat_input()
 
- [old line 120 removed; content not captured in this view]
+def user_message(content: str):
+  with me.box(
+    style=me.Style(
+      background="#e7f2ff",
+      padding=me.Padding.all(16),
+      margin=me.Margin.symmetric(vertical=16),
+      border_radius=16,
+    )
+  ):
+    me.text(content)
+
+def model_message(message: ChatMessage):
+  with me.box(
+    style=me.Style(
+      background="#fff",
+      padding=me.Padding.all(16),
+      border_radius=16,
+      margin=me.Margin.symmetric(vertical=16),
+    )
+  ):
     me.markdown(message.content)
     if message.in_progress:
       me.progress_spinner()
 
+
 def header():
+  def navigate_home(e: me.ClickEvent):
+    me.navigate("/")
+    state = me.state(State)
+    state.conversations = []
+
   with me.box(
+    on_click=navigate_home,
     style=me.Style(
+      cursor="pointer",
       padding=me.Padding.all(16),
     ),
   ):
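The example() component above relies on Mesop's key mechanism to pass per-item data through a shared click handler: each clickable box sets key=text, and click_example reads e.key to learn which card was clicked. A minimal standalone sketch of that pattern (the page path, state class, and labels here are invented for illustration):

import mesop as me


@me.stateclass
class DemoState:
  selected: str = ""


def on_pick(e: me.ClickEvent):
  # e.key carries the key of the box that was clicked,
  # so one handler can serve every item in the list.
  me.state(DemoState).selected = e.key


@me.page(path="/key_demo")
def key_demo():
  for label in ["alpha", "beta", "gamma"]:
    with me.box(key=label, on_click=on_pick, style=me.Style(cursor="pointer")):
      me.text(label)
  me.text("Selected: " + me.state(DemoState).selected)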
@@ -138,6 +245,7 @@ def header():
     ),
   )
 
+
 def switch_model(e: me.ClickEvent):
   state = me.state(State)
   state.is_model_picker_dialog_open = True
@@ -195,6 +303,7 @@ def on_blur(e: me.InputBlurEvent):
 def send_prompt(e: me.ClickEvent):
   state = me.state(State)
   if not state.conversations:
+    me.navigate("/conversation")
     for model in state.models:
       state.conversations.append(Conversation(model=model, messages=[]))
   input = state.input
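With this change, the first prompt sent from the home page navigates to /conversation and seeds one empty Conversation per entry in state.models; conversation_page (added above) then renders one column per conversation. With two models selected, for example, models_px = 2 * 680 = 1360, so the grid becomes repeat(2, 1fr) inside a container whose width is capped at min(1360px, calc(100% - 32px)).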
@@ -207,7 +316,7 @@ def send_prompt(e: me.ClickEvent):
     messages.append(ChatMessage(role="user", content=input))
     messages.append(ChatMessage(role="model", in_progress=True))
     yield
-
+    me.scroll_into_view(key="end_of_messages")
     if model == Models.GEMINI_1_5_FLASH.value:
       llm_response = gemini.send_prompt_flash(input, history)
     elif model == Models.GEMINI_1_5_PRO.value:
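The new me.scroll_into_view(key="end_of_messages") call targets the box with key="end_of_messages" that conversation_page places after the last conversation's messages. The yield immediately before it flushes a render so that box exists before the scroll command runs, and the bottom="50vh" margin applied while the response is in progress leaves room for the view to keep scrolling as text streams in.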
@@ -216,10 +325,8 @@ def send_prompt(e: me.ClickEvent):
       llm_response = claude.call_claude_sonnet(input, history)
     else:
       raise Exception("Unhandled model", model)
-
     for chunk in llm_response:
       messages[-1].content += chunk
       yield
     messages[-1].in_progress = False
     yield
-
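send_prompt is a generator handler: in Mesop, every yield inside an event handler triggers a re-render, which is what makes the response appear to stream. The loop appends each chunk to messages[-1].content and yields so the rendered markdown grows live, then clears in_progress and yields one final time. A minimal sketch of the same pattern with a stand-in streaming source (fake_stream and on_click_stream are invented for illustration; State and ChatMessage are the app's own classes):

import time

import mesop as me


def fake_stream():
  # Stand-in for the gemini/claude streaming responses.
  for word in ["hello ", "from ", "a ", "fake ", "model"]:
    time.sleep(0.2)
    yield word


def on_click_stream(e: me.ClickEvent):
  state = me.state(State)
  # Assumes at least one conversation has already been created.
  messages = state.conversations[0].messages
  messages.append(ChatMessage(role="model", in_progress=True))
  yield  # render the empty in-progress message first
  for chunk in fake_stream():
    messages[-1].content += chunk
    yield  # re-render after each chunk so the text streams in
  messages[-1].in_progress = False
  yield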