domro11 commited on
Commit
971925a
·
1 Parent(s): 28678fe

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -11
app.py CHANGED
@@ -4,6 +4,7 @@ from stqdm import stqdm
4
  import pandas as pd
5
  from transformers import pipeline
6
  import json
 
7
 
8
 
9
 
@@ -123,20 +124,25 @@ def main():
123
  elif choice=="Question Generation":
124
  st.subheader("Question Generation")
125
  st.write(" Enter the text to get questions generated !")
126
- question_generator = pipeline(model="mrm8488/t5-base-finetuned-question-generation-ap",tokenizer="mrm8488/t5-base-finetuned-question-generation-ap")
 
 
127
  text_input2 = st.text_area("Your Text","Enter the Text to complete")
128
 
129
 
130
- if text_input2:
131
- # Extract named entities from the text
132
- entities = extract_entities(text_input2)
133
- # Generate questions based on the text using the T5 model
134
- questions = question_generator(text_input2, max_length=30)
135
- # Display the generated questions
136
- st.subheader("Generated questions")
137
- for question in questions:
138
- st.write(question["question"])
 
 
139
 
140
  #main function to run
141
  if __name__ == '__main__':
142
- main()
 
 
4
  import pandas as pd
5
  from transformers import pipeline
6
  import json
7
+ from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
8
 
9
 
10
 
 
124
  elif choice=="Question Generation":
125
  st.subheader("Question Generation")
126
  st.write(" Enter the text to get questions generated !")
127
+ # Load the T5 model and tokenizer
128
+ model = AutoModelForSeq2SeqLM.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
129
+ tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
130
  text_input2 = st.text_area("Your Text","Enter the Text to complete")
131
 
132
 
133
+ # Create a button to generate questions
134
+ if st.button("Generate Questions"):
135
+ #Encode the input text using the tokenizer
136
+ input_ids = tokenizer.encode("generate questions: " + text_input2, return_tensors="pt")
137
+ # Use the T5 model to generate questions
138
+ question_ids = model.generate(input_ids)
139
+ # Decode the questions from the output ids using the tokenizer
140
+ questions = tokenizer.decode(question_ids[0], skip_special_tokens=True)
141
+ # Display the questions to the user
142
+ st.write("Generated Questions:")
143
+ st.write(questions)
144
 
145
  #main function to run
146
  if __name__ == '__main__':
147
+ main()
148
+