import gradio as gr


# ChatInterface passes the values of the additional_inputs widgets
# (model, system message, max tokens) as extra positional arguments.
def respond(msg, history, model, system_message, max_tokens):
    return "response"
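
# A minimal sketch (commented out) of how respond() might be wired to a local
# GGUF model with llama-cpp-python instead of returning a canned string. This
# is an illustrative assumption, not the app's actual llama-cpp-agent logic:
# it treats the dropdown value as a local model path and picks n_ctx=4096
# arbitrarily. In a real app you would also cache the loaded model rather
# than reload it on every call.
#
# from llama_cpp import Llama
#
# def respond(msg, history, model, system_message, max_tokens):
#     llm = Llama(model_path=model, n_ctx=4096)  # load the selected GGUF file
#     out = llm.create_chat_completion(
#         messages=[
#             {"role": "system", "content": system_message},
#             {"role": "user", "content": msg},
#         ],
#         max_tokens=int(max_tokens),
#     )
#     return out["choices"][0]["message"]["content"]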

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Dropdown(
            [
                'Meta-Llama-3-70B-Instruct-Q3_K_M.gguf',
                'gemma-2-27b-it-Q8_0.gguf'
            ],
            value="gemma-2-27b-it-Q8_0.gguf",
            label="Model"
        ),
        gr.Textbox(value="You are a helpful assistant.", label="System message"),
        gr.Slider(minimum=1, maximum=4096, value=2048, step=1, label="Max tokens"),
    ],
    description="Llama-cpp-agent: chat with multi-LLM selection",
    chatbot=gr.Chatbot(
        scale=1,
        placeholder="PLACEHOLDER",
        show_copy_button=True
    )
)


demo.launch()