tikendraw committed on
Commit
43c4fb5
·
1 Parent(s): b1cdcf3

removed comments

Browse files
Files changed (1) hide show
  1. app/app.py +8 -9
app/app.py CHANGED
@@ -1,9 +1,9 @@
1
  import time
2
  import streamlit as st
3
- from app.utils import generate_answer, load_llm
4
  from core.types import ThoughtStepsDisplay, BigMessage
5
  from .app_config import InputConfig, ENV_FILE_PATH, CONFIG_FILE_PATH
6
- from core.prompts.cot import SYSTEM_PROMPT
7
 
8
 
9
 
@@ -46,7 +46,8 @@ def main():
46
  config = InputConfig.load(env_file=ENV_FILE_PATH, config_file=CONFIG_FILE_PATH)
47
  config = config_sidebar(config=config)
48
  llm = load_llm(config)
49
-
 
50
 
51
  current_tab='o1_tab'
52
  big_message_attr_name = f"{current_tab}_big_messages"
@@ -72,8 +73,9 @@ def main():
72
  else:
73
  print_thought(message.content, is_final=True)
74
 
75
-
76
-
 
77
  if user_input := st.chat_input("What is up bro?"):
78
  big_message_attr.append(BigMessage(role="user", content=user_input, thoughts=[]))
79
 
@@ -107,10 +109,8 @@ def main():
107
  ):
108
 
109
  thoughts.append(step)
110
-
111
- st.write(step.md())
112
- # add breakline after each step
113
  st.markdown('---')
 
114
  status.update(label=step.step_title, state="running", expanded=False)
115
 
116
 
@@ -139,7 +139,6 @@ def print_thought(thought:ThoughtStepsDisplay, is_final:bool=False):
139
  if is_final:
140
  st.markdown(thought.md())
141
  else:
142
- # st.markdown(f'\n```json\n{thought.model_dump_json()}\n```\n', unsafe_allow_html=True)
143
  with st.expander(f'{thought.step_title}'):
144
  st.markdown(thought.md())
145
 
 
1
  import time
2
  import streamlit as st
3
+ from core.generation_utils import generate_answer, load_llm
4
  from core.types import ThoughtStepsDisplay, BigMessage
5
  from .app_config import InputConfig, ENV_FILE_PATH, CONFIG_FILE_PATH
6
+ import litellm
7
 
8
 
9
 
 
46
  config = InputConfig.load(env_file=ENV_FILE_PATH, config_file=CONFIG_FILE_PATH)
47
  config = config_sidebar(config=config)
48
  llm = load_llm(config)
49
+
50
+ is_vision_llm = litellm.supports_vision(model=config.model_name) # will use it someday
51
 
52
  current_tab='o1_tab'
53
  big_message_attr_name = f"{current_tab}_big_messages"
 
73
  else:
74
  print_thought(message.content, is_final=True)
75
 
76
+
77
+
78
+
79
  if user_input := st.chat_input("What is up bro?"):
80
  big_message_attr.append(BigMessage(role="user", content=user_input, thoughts=[]))
81
 
 
109
  ):
110
 
111
  thoughts.append(step)
 
 
 
112
  st.markdown('---')
113
+ st.write(step.md())
114
  status.update(label=step.step_title, state="running", expanded=False)
115
 
116
 
 
139
  if is_final:
140
  st.markdown(thought.md())
141
  else:
 
142
  with st.expander(f'{thought.step_title}'):
143
  st.markdown(thought.md())
144