vmoras committed on
Commit
2f4ae78
·
1 Parent(s): 9a190cc

Add checks to avoid long messages and messages without a question

Browse files
Files changed (2) hide show
  1. app.py +3 -4
  2. functions.py +27 -18
app.py CHANGED
@@ -1,7 +1,7 @@
1
  from functions import *
2
 
3
 
4
- scores_parameters, authors, models, temperature_values = get_main_data()
5
 
6
  with gr.Blocks() as app:
7
  msg_history = gr.State() # Messages with the format used by OpenAI
@@ -9,7 +9,6 @@ with gr.Blocks() as app:
9
 
10
  with gr.Tab('Test Chats'):
11
  with gr.Row():
12
- model = gr.Textbox(value=models[0], label='Model', interactive=False)
13
  author = gr.Dropdown(authors, value=authors[0], label='Author', interactive=True)
14
  temperature = gr.Radio(temperature_values, label="Randomness", value=0.2)
15
  chat_btn = gr.Button(value='Start chat')
@@ -44,12 +43,12 @@ with gr.Blocks() as app:
44
 
45
  message.submit(
46
  get_answer,
47
- [message, msg_history, chatbot, waiting_time, temperature, model],
48
  [message, msg_history, chatbot, waiting_time])
49
 
50
  scores_btn.click(
51
  save_scores,
52
- [author, temperature, chatbot, waiting_time, model, opinion_box] + scores,
53
  scores_box)
54
 
55
 
 
1
  from functions import *
2
 
3
 
4
+ scores_parameters, authors, _, temperature_values = get_main_data()
5
 
6
  with gr.Blocks() as app:
7
  msg_history = gr.State() # Messages with the format used by OpenAI
 
9
 
10
  with gr.Tab('Test Chats'):
11
  with gr.Row():
 
12
  author = gr.Dropdown(authors, value=authors[0], label='Author', interactive=True)
13
  temperature = gr.Radio(temperature_values, label="Randomness", value=0.2)
14
  chat_btn = gr.Button(value='Start chat')
 
43
 
44
  message.submit(
45
  get_answer,
46
+ [message, msg_history, chatbot, waiting_time, temperature],
47
  [message, msg_history, chatbot, waiting_time])
48
 
49
  scores_btn.click(
50
  save_scores,
51
+ [author, temperature, chatbot, waiting_time, opinion_box] + scores,
52
  scores_box)
53
 
54
 
functions.py CHANGED
@@ -12,7 +12,7 @@ from huggingface_hub import hf_hub_download, HfApi
12
  def get_main_data():
13
  """
14
  Initializes the key for the api and returns the parameters for the scores, name of the possible authors,
15
- models and possible temperature values
16
  """
17
  openai.api_key = os.environ.get('API_KEY')
18
 
@@ -22,10 +22,10 @@ def get_main_data():
22
  ]
23
 
24
  authors = ['Sofia', 'Eliza', 'Sindy', 'Carlos', 'Andres', 'Adriana', 'Carolina', 'Valeria']
25
- models = ["gpt-4"]
26
- temperature_values = [0.2, 0.8, 1.0]
27
 
28
- return scores_parameters, authors, models, temperature_values
29
 
30
 
31
  def innit_bot():
@@ -58,19 +58,19 @@ def make_noninteractive():
58
  gr.Radio.update(interactive=False))
59
 
60
 
61
- def call_api(model: gr.Dropdown, msg_history: gr.State, temperature: gr.State):
62
  """
63
  Returns the API's response
64
  """
65
  response = openai.ChatCompletion.create(
66
- model=model,
67
  messages=msg_history,
68
  temperature=temperature
69
  )
70
  return response
71
 
72
 
73
- def handle_call(model: gr.Dropdown, msg_history: gr.State, temperature: gr.State):
74
  """
75
  Returns the response and waiting time of the AI. It also handles the possible errors
76
  """
@@ -79,7 +79,7 @@ def handle_call(model: gr.Dropdown, msg_history: gr.State, temperature: gr.State
79
  while True:
80
  try:
81
  start_time = time.time()
82
- response = call_api(model, msg_history, temperature)
83
  end_time = time.time()
84
  break
85
 
@@ -103,13 +103,13 @@ def handle_call(model: gr.Dropdown, msg_history: gr.State, temperature: gr.State
103
  return response, needed_time
104
 
105
 
106
- def get_ai_answer(msg: str, model: gr.Dropdown, msg_history: gr.State, temperature: gr.State):
107
  """
108
  Returns the response given by the model, all the message history so far and the seconds
109
- the api took to retrieve such response. Both depend on the model
110
  """
111
  msg_history.append({"role": "user", "content": msg})
112
- response, needed_time = handle_call(model, msg_history, temperature)
113
  AI_response = response["choices"][0]["message"]["content"]
114
  msg_history.append({'role': 'assistant', 'content': AI_response})
115
 
@@ -117,9 +117,8 @@ def get_ai_answer(msg: str, model: gr.Dropdown, msg_history: gr.State, temperatu
117
 
118
 
119
  def get_answer(
120
- msg: str, msg_history: gr.State,
121
- chatbot_history: gr.Chatbot, waiting_time: gr.State,
122
- temperature: gr.State, model: gr.Dropdown):
123
  """
124
  Cleans msg box, adds the new message to the message history,
125
  gets the answer from the bot and adds it to the chatbot history
@@ -127,7 +126,18 @@ def get_answer(
127
  """
128
 
129
  # Get bot answer (output), messages history and waiting time
130
- AI_response, msg_history, needed_time = get_ai_answer(msg, model, msg_history, temperature)
 
 
 
 
 
 
 
 
 
 
 
131
 
132
  # Save waiting time
133
  waiting_time.append(needed_time)
@@ -140,13 +150,12 @@ def get_answer(
140
 
141
  def save_scores(
142
  author: gr.Dropdown, temperature: gr.State,
143
- history: gr.Chatbot, waiting_time: gr.State,
144
- model: gr.Dropdown, opinion: gr.Textbox, *score_values):
145
  """
146
  Saves the scores and chat's info into the json file
147
  """
148
  # Get the parameters for each score
149
- score_parameters, _, _, _ = get_main_data()
150
 
151
  # Get the score of each parameter
152
  scores = dict()
 
12
  def get_main_data():
13
  """
14
  Initializes the key for the api and returns the parameters for the scores, name of the possible authors,
15
+ model used and possible temperature values
16
  """
17
  openai.api_key = os.environ.get('API_KEY')
18
 
 
22
  ]
23
 
24
  authors = ['Sofia', 'Eliza', 'Sindy', 'Carlos', 'Andres', 'Adriana', 'Carolina', 'Valeria']
25
+ model = "gpt-4"
26
+ temperature_values = [0.8, 1.0, 1.2]
27
 
28
+ return scores_parameters, authors, model, temperature_values
29
 
30
 
31
  def innit_bot():
 
58
  gr.Radio.update(interactive=False))
59
 
60
 
61
+ def call_api(msg_history: gr.State, temperature: gr.State):
62
  """
63
  Returns the API's response
64
  """
65
  response = openai.ChatCompletion.create(
66
+ model="gpt-4",
67
  messages=msg_history,
68
  temperature=temperature
69
  )
70
  return response
71
 
72
 
73
+ def handle_call(msg_history: gr.State, temperature: gr.State):
74
  """
75
  Returns the response and waiting time of the AI. It also handles the possible errors
76
  """
 
79
  while True:
80
  try:
81
  start_time = time.time()
82
+ response = call_api(msg_history, temperature)
83
  end_time = time.time()
84
  break
85
 
 
103
  return response, needed_time
104
 
105
 
106
+ def get_ai_answer(msg: str, msg_history: gr.State, temperature: gr.State):
107
  """
108
  Returns the response given by the model, all the message history so far and the seconds
109
+ the api took to retrieve such response.
110
  """
111
  msg_history.append({"role": "user", "content": msg})
112
+ response, needed_time = handle_call(msg_history, temperature)
113
  AI_response = response["choices"][0]["message"]["content"]
114
  msg_history.append({'role': 'assistant', 'content': AI_response})
115
 
 
117
 
118
 
119
  def get_answer(
120
+ msg: str, msg_history: gr.State, chatbot_history: gr.Chatbot,
121
+ waiting_time: gr.State, temperature: gr.State):
 
122
  """
123
  Cleans msg box, adds the new message to the message history,
124
  gets the answer from the bot and adds it to the chatbot history
 
126
  """
127
 
128
  # Get bot answer (output), messages history and waiting time
129
+ AI_response, msg_history, needed_time = get_ai_answer(msg, msg_history, temperature)
130
+
131
+ # Make sure the AI_response is short, if not make it shorter
132
+ if len(AI_response) > 260:
133
+ new_msg = 'El mensaje esta muy largo. Da la misma idea (mandando el link, pregunta y/o promocion que hayas ' \
134
+ 'dado) pero usando 40 palabras.'
135
+ AI_response, msg_history, needed_time = get_ai_answer(new_msg, msg_history, temperature)
136
+
137
+ # Make sure the AI_response has at least one question
138
+ if '?' not in AI_response:
139
+ new_msg = 'Incluye 1 pregunta dentro del mensaje. Puede estar relacionada a lo que se hablo antes o algo nuevo.'
140
+ AI_response, msg_history, needed_time = get_ai_answer(new_msg, msg_history, temperature)
141
 
142
  # Save waiting time
143
  waiting_time.append(needed_time)
 
150
 
151
  def save_scores(
152
  author: gr.Dropdown, temperature: gr.State,
153
+ history: gr.Chatbot, waiting_time: gr.State, opinion: gr.Textbox, *score_values):
 
154
  """
155
  Saves the scores and chat's info into the json file
156
  """
157
  # Get the parameters for each score
158
+ score_parameters, _, model, _ = get_main_data()
159
 
160
  # Get the score of each parameter
161
  scores = dict()