lixinhao committed (verified)
Commit dc9d9f3 · 1 Parent(s): 9e6271f

Update modeling_videochat_flash.py

Files changed (1): modeling_videochat_flash.py (+1 -1)
modeling_videochat_flash.py CHANGED

@@ -641,7 +641,7 @@ class VideoChatFlashQwenForCausalLM(LlavaMetaForCausalLM, Qwen2ForCausalLM_Flash
         conv = conv_templates["qwen_2"].copy()
 
         if chat_history is None or len(chat_history) == 0:
-            user_prompt = f'{DEFAULT_IMAGE_TOKEN}\n{time_msg.rstrip()} {user_prompt}'
+            user_prompt = f'{DEFAULT_IMAGE_TOKEN}\n{time_msg.strip()} {user_prompt}'
         else:
             assert DEFAULT_IMAGE_TOKEN in chat_history[0]['content'], chat_history
             for msg in chat_history:
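The one-line change swaps time_msg.rstrip() for time_msg.strip(), so leading as well as trailing whitespace is removed from the timestamp message before it is spliced into the prompt. A minimal sketch of the effect, using hypothetical placeholder values for DEFAULT_IMAGE_TOKEN, time_msg, and user_prompt (the real values are built elsewhere in the model code):

# Hypothetical values for illustration only.
DEFAULT_IMAGE_TOKEN = "<image>"
time_msg = " The video lasts for 8 seconds. "   # note the leading space
user_prompt = "What happens in the video?"

# Old behaviour (rstrip): the leading space survives after the newline.
old = f'{DEFAULT_IMAGE_TOKEN}\n{time_msg.rstrip()} {user_prompt}'
# New behaviour (strip): both leading and trailing whitespace are removed.
new = f'{DEFAULT_IMAGE_TOKEN}\n{time_msg.strip()} {user_prompt}'

print(repr(old))  # '<image>\n The video lasts for 8 seconds. What happens in the video?'
print(repr(new))  # '<image>\nThe video lasts for 8 seconds. What happens in the video?'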