Create app.py
app.py
ADDED
@@ -0,0 +1,147 @@
import os
import time
import subprocess

import gradio as gr
from openai import OpenAI

# API key and system prompt come from environment variables
gpt_api_key = os.environ['OPENAI_API_KEY']
sys_prompt = os.environ['SYSTEM_PROMPT']

client = OpenAI(api_key=gpt_api_key)
model_name = "ft:gpt-3.5-turbo-1106:personal::8oksosTP"
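# Note: OPENAI_API_KEY and SYSTEM_PROMPT must be set in the environment (e.g. as
# Space secrets) before launch, and model_name above is the author's fine-tuned
# gpt-3.5-turbo model ID; substitute your own fine-tuned model to reuse this app.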
### Functions
# Generate a reply with an OpenAI Assistant
def assistant_choice(assistantid, text):
    # Retrieve the assistant
    assistant = client.beta.assistants.retrieve(assistantid)

    # Create a message thread
    thread = client.beta.threads.create()

    # Add the user message
    message = client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content=text,
    )

    # Start the run
    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant.id,
    )

    # Poll until the run completes
    print("run을 실행합니다.")
    while True:
        if run.status == "completed":
            break
        run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
        # print(run)
        # print("실행 중...")
        time.sleep(5)

    # Read the latest message as the answer
    messages = client.beta.threads.messages.list(thread_id=thread.id)
    gpt_answer = messages.data[0].content[0].text.value

    return gpt_answer

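# Routing note: the fine-tuned chat model below acts as a first responder / classifier.
# When it replies with one of the "구매옵션" (purchase option) labels, the question is
# forwarded to the matching product Assistant via assistant_choice().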
# Generate a GPT reply
def fine_function_gpt(state, state_chatbot, text):
    if text == "":
        return state, state_chatbot, state_chatbot

    messages = state + [{'role': 'user', 'content': text}]

    res = client.chat.completions.create(
        model=model_name,
        messages=messages,
        temperature=0.6,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0
    )

    msg = res.choices[0].message.content

    print(msg)

    if msg == "커피 구매옵션":
        msg = assistant_choice("asst_ni23TayvHjxOarAxVTDODm91", text)

    elif msg == "블라인드 구매옵션":
        msg = assistant_choice("asst_qc85Yh4LjKqWMxlDakHcXsFs", text)

    new_state = [{'role': 'user', 'content': text},
                 {'role': 'assistant', 'content': msg}]

    state = state + new_state                      # history used internally for the model
    state_chatbot = state_chatbot + [(text, msg)]  # history shown on screen
    print(state)

    return state, state_chatbot, state_chatbot

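# start_def initialises both histories when the start button is pressed: the model-side
# history gets the system prompt plus an opening exchange, and the visible chat gets a greeting.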
# "Start consultation" button handler
def start_def(m):
    if not m:   # no product selected in the dropdown
        return [], [], [("상담을 진행할 제품을 선택해주세요!", "")]
    else:
        text = f"{m} 문의드립니다."
        message = [{'role': 'system', 'content': sys_prompt},
                   {'role': 'user', 'content': text},
                   {'role': 'assistant', 'content': "안녕하세요 고객님~ 무엇을 도와드릴까요?"}]

        state_chatbot = [("...", "안녕하세요. 고객님! 무엇을 도와드릴까요?")]  # (user, bot) tuple shown in the chat
        return message, state_chatbot, state_chatbot

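# ---------- Gradio UI ----------
# Two gr.State objects keep the model-facing history and the visible chat history in
# sync; both are reset by start_def and updated on every call to fine_function_gpt.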
with gr.Blocks(theme='JohnSmith9982/small_and_pretty') as demo:
    state = gr.State([])          # messages used when calling GPT
    state_chatbot = gr.State([])  # chat history mirrored into gr.Chatbot

    with gr.Row():
        gr.HTML("""<div style="text-align: center; max-width: 550px; margin: 0 auto;">
            <div>
                <p style="font-size: 40px; font-weight: bold;">마리하우스 챗봇</p>
            </div>
        </div>""")

    with gr.Row():
        with gr.Column(scale=6):
            md = gr.Dropdown(["커피", "블라인드"], type="value", label="제품 선택", info="원하는 상담 제품을 골라주세요!")
        with gr.Column(scale=4):
            gr.HTML("""<div style="text-align: center;">
                <div>
                    <p style="font-size: 15px; font-weight: bold;">확인</p>
                </div>
            </div>""")
            start_btn = gr.Button("시작하기")
        with gr.Column(scale=1):
            gr.HTML("""<div style="text-align: center;">
                <div>
                    <p style="font-size: 15px; font-weight: bold;">다크모드 전환</p>
                </div>
            </div>""")
            toggle_dark = gr.Button(value="Toggle Dark")
            toggle_dark.click(None,
                              js="""() => {
                                  document.body.classList.toggle('dark');
                                  document.querySelector('gradio-container').style.backgroundColor = 'var(--color-background-primary)'
                              }
                              """
                              )

    with gr.Row():
        with gr.Column(scale=1):
            chatbot = gr.Chatbot(elem_id='메세지창', height=650)
            txt = gr.Textbox(show_label=False, placeholder='메세지를 입력해주세요!')
            txt_btn = gr.Button("메세지 보내기")

    txt_btn.click(fine_function_gpt, [state, state_chatbot, txt], [state, state_chatbot, chatbot])
    txt_btn.click(lambda: '', None, txt)
    txt.submit(fine_function_gpt, [state, state_chatbot, txt], [state, state_chatbot, chatbot])  # send on Enter in the textbox
    txt.submit(lambda: '', None, txt)

    start_btn.click(start_def, inputs=[md], outputs=[state, state_chatbot, chatbot])

demo.launch(debug=True)