khang119966 committed
Commit 2d5847a · verified · Parent: 2a4bdfb

Update app.py

Files changed (1):
  app.py  +8 -8
app.py CHANGED
@@ -118,7 +118,7 @@ def chat(message, history):
 We currently only support one image at the start of the context! Please start a new conversation."""

     if len(history) == 0 and len(message["files"]) != 0:
-        test_image = message["files"][0]["path"]
+        test_image = message["files"][0]
         pixel_values = load_image(test_image, max_num=6).to(torch.bfloat16).cuda()
     elif len(history) == 0 and len(message["files"]) == 0:
         pixel_values = None
@@ -157,13 +157,13 @@ We currently only support one image at the start of the context! Please start a

     print(f'User: {question}\nAssistant: {response}')

-    return response
-    # buffer = ""
-    # for new_text in response:
-    #     buffer += new_text
-    #     generated_text_without_prompt = buffer[:]
-    #     time.sleep(0.005)
-    #     yield generated_text_without_prompt
+    # return response
+    buffer = ""
+    for new_text in response:
+        buffer += new_text
+        generated_text_without_prompt = buffer[:]
+        time.sleep(0.005)
+        yield generated_text_without_prompt

 CSS ="""
 # @media only screen and (max-width: 600px){
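
Taken together, the two changes adapt the Space to Gradio's multimodal chat API: `message["files"]` is treated as a list of plain file paths (no `["path"]` key), and the function becomes a generator that yields a growing buffer so the reply streams into the UI instead of arriving all at once. Below is a minimal, self-contained sketch of that streaming pattern, assuming a `gr.ChatInterface(multimodal=True)` front end; the model call is stubbed with an echo, and all names outside the diff are illustrative rather than taken from this app.

import time
import gradio as gr

def chat(message, history):
    # With multimodal=True, `message` is a dict like {"text": ..., "files": [...]},
    # and each entry in "files" is assumed to be a plain file path (hence the
    # change from message["files"][0]["path"] to message["files"][0]).
    files = message.get("files", [])
    test_image = files[0] if files else None

    # Stub: the real app builds pixel_values with load_image(...) and runs the model here.
    response = f"(echo) text={message['text']!r}, image={test_image!r}"

    # Yield the accumulated buffer so the chat UI streams the reply incrementally,
    # mirroring the uncommented loop in the diff above.
    buffer = ""
    for new_text in response:
        buffer += new_text
        time.sleep(0.005)
        yield buffer

demo = gr.ChatInterface(fn=chat, multimodal=True)
demo.launch()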