KvrParaskevi
committed on
Update app.py
app.py
CHANGED
@@ -1,4 +1,5 @@
 import gradio as gr
+import spaces
 from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain.prompts import HumanMessagePromptTemplate, ChatPromptTemplate
 from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
@@ -9,6 +10,7 @@ model = AutoModelForCausalLM.from_pretrained(model_name)
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 
 # Define the Langchain chatbot function
+@spaces.GPU
 def chatbot(message, history):
     # Create a Langchain prompt template
     prompt_template = HumanMessagePromptTemplate.from_message(message)
@@ -35,4 +37,4 @@ with gr.Blocks() as demo:
     )
 
 # Launch the Gradio app
-
+demo.launch()
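For context, a minimal runnable sketch of the kind of app this commit produces is below. Only the imports, the @spaces.GPU decorator, and the final demo.launch() come from the diff; the model name, prompt wording, generation settings, and the use of gr.ChatInterface in place of the original gr.Blocks layout are illustrative assumptions. Note also that HumanMessagePromptTemplate.from_template is the documented LangChain constructor; the from_message call in the committed code would likely raise an AttributeError.

# Minimal sketch, assuming a ZeroGPU Space with the spaces package installed.
# Model name and generation settings are placeholders, not from the commit.
import threading

import gradio as gr
import spaces
from langchain.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

model_name = "gpt2"  # assumption: the diff does not show which model is loaded
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

@spaces.GPU  # on a ZeroGPU Space, allocates a GPU for the duration of each call
def chatbot(message, history):
    # Build the prompt with LangChain; from_template is the documented constructor.
    prompt = ChatPromptTemplate.from_messages(
        [HumanMessagePromptTemplate.from_template("{user_input}")]
    )
    text = prompt.format(user_input=message)

    inputs = tokenizer(text, return_tensors="pt").to(model.device)
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)

    # Run generation in a background thread so tokens can be streamed back to Gradio.
    thread = threading.Thread(
        target=model.generate,
        kwargs=dict(**inputs, streamer=streamer, max_new_tokens=256),
    )
    thread.start()

    partial = ""
    for token in streamer:
        partial += token
        yield partial

demo = gr.ChatInterface(chatbot)

# Launch the Gradio app
demo.launch()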