AminFaraji committed
Commit 6fa1e27 · verified · 1 Parent(s): c776427

Update app.py

Files changed (1)
  1. app.py +3 -0
app.py CHANGED
@@ -171,6 +171,7 @@ def get_llama_response(message: str, history: list) -> str:
 
     query = prompt_template.format(query=message, context=context_text)
     #query=query.format(context=context_text,question=message)
+    print('im gonna generate response')
     sequences = llama_pipeline(
         query,
         do_sample=True,
@@ -180,10 +181,12 @@ def get_llama_response(message: str, history: list) -> str:
         max_length=1024,
     )
 
+    print('igenerated response')
     generated_text = sequences[0]['generated_text']
     response = generated_text[len(query):]  # Remove the prompt from the output
 
     print("Chatbot:", response.strip())
+    print('i wanna return')
     return response.strip()
 
 
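For context, a minimal sketch of how app.py presumably wires up llama_pipeline and prompt_template around this function. The model name and template wording below are assumptions for illustration, not taken from this commit; only the call pattern the diff shows (format the prompt, run the pipeline, slice the prompt off the generated text) is reproduced.

# Sketch of the assumed surrounding setup; anything marked "assumed" is a placeholder.
from transformers import pipeline

# Assumed checkpoint: any causal LM works for this sketch (Llama-2 itself is gated).
llama_pipeline = pipeline("text-generation", model="meta-llama/Llama-2-7b-chat-hf")

# Assumed template wording; the diff only shows that it takes `query` and `context` fields.
prompt_template = (
    "Use the context below to answer the question.\n"
    "Context: {context}\n"
    "Question: {query}\n"
    "Answer:"
)

# Same call pattern as the diff: format the prompt, generate, then slice the prompt
# off the front of generated_text so only the model's continuation is returned.
query = prompt_template.format(query="What does this app do?", context="...")
sequences = llama_pipeline(query, do_sample=True, max_length=1024)
response = sequences[0]["generated_text"][len(query):].strip()
print("Chatbot:", response)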