tatihden committed on
Commit
99223bb
·
verified ·
1 Parent(s): b68cb23

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -7
app.py CHANGED
@@ -29,7 +29,7 @@ def format_prompt(message, history):
29
  return prompt
30
 
31
 
32
def chat_inf(system_prompt, prompt, history, temp, tokens, top_p, client_choice):
    """Stream a chat response from the selected inference client.

    Parameters
    ----------
    system_prompt : str
        System-level instruction prepended to the user prompt.
    prompt : str
        The user's current message.
    history : list[tuple[str, str]] | None
        Prior (prompt, response) pairs; ``None``/empty means a fresh chat.
    temp : float
        Sampling temperature.
    tokens : int
        Maximum number of new tokens to generate.
    top_p : float
        Nucleus-sampling probability mass.
    client_choice : str | int
        1-based index into the module-level ``clients`` list.

    Yields
    ------
    list[tuple[str, str]]
        The chat transcript, re-yielded as each streamed chunk arrives;
        the final yield is the full updated ``history``.
    """
    client = clients[int(client_choice) - 1]
    if not history:
        history = []
    hist_len = len(history)
    print(hist_len)

    generate_kwargs = dict(
        temperature=temp,
        max_new_tokens=tokens,
        top_p=top_p,
    )
    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
    # BUG FIX: generate_kwargs was built but never passed to the client, so
    # the temperature / token-limit / top_p controls had no effect. Forward
    # them to generate(). NOTE(review): the kwarg names (max_new_tokens, ...)
    # match huggingface_hub text-generation parameters — confirm the client's
    # generate() accepts them.
    stream = client.generate(formatted_prompt, **generate_kwargs)
    output = ""

    for response in stream:
        output += response
        yield [(prompt, output)]
    history.append((prompt, output))
    yield history
 
29
  return prompt
30
 
31
 
32
def chat_inf(system_prompt, prompt, history, temp, tokens, top_p, seed, client_choice):
    """Stream a chat response from the selected model, sampling with top-p.

    Parameters
    ----------
    system_prompt : str
        System-level instruction prepended to the user prompt.
    prompt : str
        The user's current message.
    history : list[tuple[str, str]] | None
        Prior (prompt, response) pairs; ``None``/empty means a fresh chat.
    temp : float
        Sampling temperature.
    tokens : int
        Maximum generation length (see the hedged note at the generate call).
    top_p : float
        Nucleus-sampling probability mass.
    seed : int | None
        Random seed for reproducible sampling.
    client_choice : str | int
        1-based index into the module-level ``clients`` list.

    Yields
    ------
    list[tuple[str, str]]
        The chat transcript, re-yielded as each streamed chunk arrives;
        the final yield is the full updated ``history``.
    """
    client = clients[int(client_choice) - 1]
    if not history:
        history = []
    hist_len = len(history)
    print(hist_len)

    # BUG FIX: keras_nlp.samplers.TopKSampler takes no `top_p` argument, so
    # the original call raised TypeError. TopPSampler accepts both the
    # nucleus mass `p` and an optional `k` cut-off (plus seed/temperature),
    # so every UI control is honored.
    sampler = keras_nlp.samplers.TopPSampler(p=top_p, k=5, seed=seed, temperature=temp)
    client.compile(sampler=sampler)

    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
    # `tokens` was previously only misused inside the loop; cap the
    # generation length with it instead. NOTE(review): assumes `client` is a
    # keras_nlp CausalLM whose generate() accepts `max_length` — confirm.
    stream = client.generate(formatted_prompt, max_length=tokens)
    output = ""

    for response in stream:
        # BUG FIX: `response.str(tokens)` raised AttributeError — str is not
        # a method of the streamed chunk. Coerce the chunk to text instead.
        output += str(response)
        yield [(prompt, output)]
    history.append((prompt, output))
    yield history