Update functions.py
functions.py (+8 -2)
@@ -498,21 +498,27 @@ def generate_eval(raw_text, N, chunk):
 
     # raw_text = ','.join(raw_text)
 
-    st.
+    update = st.empty()
+
+    update.info("`Generating sample questions ...`")
     n = len(raw_text)
     starting_indices = [random.randint(0, n-chunk) for _ in range(N)]
     sub_sequences = [raw_text[i:i+chunk] for i in starting_indices]
     chain = QAGenerationChain.from_llm(ChatOpenAI(temperature=0))
     eval_set = []
     for i, b in enumerate(sub_sequences):
+        ques_update = st.empty()
         try:
             qa = chain.run(b)
             eval_set.append(qa)
-
+            ques_update.info("Creating Question:",i+1)
         except Exception as e:
             st.warning('Error generating question %s.' % str(i+1), icon="⚠️")
             #st.write(e)
     eval_set_full = list(itertools.chain.from_iterable(eval_set))
+
+    update.empty()
+
     return eval_set_full
 
 @st.cache_resource
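The added lines use Streamlit's `st.empty()` placeholders to show progress while sample questions are generated, then clear the message when generation finishes. Note that `st.info` accepts a single body string (plus an optional `icon` keyword), so the committed `ques_update.info("Creating Question:",i+1)` would raise a TypeError on current Streamlit; the counter needs to be formatted into the string. Below is a minimal sketch of the intended pattern, assuming the imports and chain setup already present elsewhere in functions.py (exact import paths depend on the installed Streamlit/LangChain versions):

import itertools
import random

import streamlit as st
from langchain.chains import QAGenerationChain
from langchain.chat_models import ChatOpenAI


def generate_eval(raw_text, N, chunk):
    # Placeholder that shows (and later clears) an overall status message.
    update = st.empty()
    update.info("`Generating sample questions ...`")

    # Sample N random substrings of length `chunk` from the raw text.
    n = len(raw_text)
    starting_indices = [random.randint(0, n - chunk) for _ in range(N)]
    sub_sequences = [raw_text[i:i + chunk] for i in starting_indices]
    chain = QAGenerationChain.from_llm(ChatOpenAI(temperature=0))

    eval_set = []
    for i, b in enumerate(sub_sequences):
        # Per-question placeholder; each .info() call overwrites the previous one.
        ques_update = st.empty()
        try:
            qa = chain.run(b)
            eval_set.append(qa)
            # st.info takes one body string, so format the counter into it.
            ques_update.info("Creating Question: %s" % str(i + 1))
        except Exception:
            st.warning("Error generating question %s." % str(i + 1), icon="⚠️")

    eval_set_full = list(itertools.chain.from_iterable(eval_set))

    # Clear the overall status message once generation is done.
    update.empty()
    return eval_set_full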