alexkueck committed on
Commit
0d500dc
·
1 Parent(s): 2edcc3d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -2
app.py CHANGED
@@ -294,13 +294,15 @@ def generate(text, history, rag_option, model_option, temperature=0.5, max_new_
294
  #inference allg:
295
  data = {
296
  "inputs": prompt,
297
- "options": {"max_new_tokens": 1024},
298
  }
299
  response= requests.post(API_URL, headers=HEADERS, json=data)
300
  result = response.json()
301
  print("result:------------------")
302
  chatbot_response = result[0]['generated_text']
303
  print(chatbot_response)
 
 
304
  except Exception as e:
305
  raise gr.Error(e)
306
 
@@ -309,7 +311,7 @@ def generate(text, history, rag_option, model_option, temperature=0.5, max_new_
309
 
310
  #Antwort als Stream ausgeben...
311
  for i in range(len(chatbot_message)):
312
- time.sleep(0.05)
313
  yield chatbot_message[: i+1]
314
 
315
 
 
294
  #inference allg:
295
  data = {
296
  "inputs": prompt,
297
+ "options": {"max_new_tokens": max_new_tokens},
298
  }
299
  response= requests.post(API_URL, headers=HEADERS, json=data)
300
  result = response.json()
301
  print("result:------------------")
302
  chatbot_response = result[0]['generated_text']
303
  print(chatbot_response)
304
+ print("anzahl tokens gesamt antwort:------------------")
305
+ print (len(response.split()))
306
  except Exception as e:
307
  raise gr.Error(e)
308
 
 
311
 
312
  #Antwort als Stream ausgeben...
313
  for i in range(len(chatbot_message)):
314
+ time.sleep(0.03)
315
  yield chatbot_message[: i+1]
316
 
317