# quizzz / app.py
# (Hugging Face Space file — header metadata preserved from the hosted page:
#  author sanarawal7, commit a517614, 1.5 kB)
import os

import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForQuestionAnswering

# Read the Hugging Face token from the environment rather than hard-coding a
# secret in source; falls back to the original placeholder when unset so
# behavior is unchanged for callers that patched the constant.
hf_api_token = os.environ.get("HF_API_KEY", "HF_API_KEY")

# Load the model and tokenizer once at import time, not per request.
# NOTE(review): allenai/Molmo-7B-D-0924 is a multimodal (vision-language)
# checkpoint — loading it via AutoModelForQuestionAnswering will most likely
# fail; confirm an extractive-QA checkpoint is intended here.
model_name = "allenai/Molmo-7B-D-0924"
tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=hf_api_token)
model = AutoModelForQuestionAnswering.from_pretrained(model_name, use_auth_token=hf_api_token)
def generate_questions(file_content):
    """Run the QA model over an uploaded file and split the output.

    Parameters
    ----------
    file_content : bytes | str | file-like
        What Gradio's ``gr.File`` component delivers. Depending on the
        Gradio version this is a filesystem path (str) or a tempfile
        wrapper exposing ``.name`` — it is NOT raw bytes, so the original
        ``file_content.decode(...)`` call always raised. Raw ``bytes`` is
        still accepted for backward compatibility.

    Returns
    -------
    tuple[str, str]
        Newline-joined questions and options — strings on both the error
        and success paths, matching the two ``gr.Textbox`` outputs.
    """
    # --- read the uploaded file as UTF-8 text ---------------------------
    try:
        if isinstance(file_content, bytes):
            text = file_content.decode("utf-8")
        elif isinstance(file_content, str):
            with open(file_content, "r", encoding="utf-8") as fh:
                text = fh.read()
        else:
            # gr.File tempfile wrapper: the path lives on ``.name``
            with open(file_content.name, "r", encoding="utf-8") as fh:
                text = fh.read()
    except Exception as e:
        return f"Error decoding file: {str(e)}", ""

    # --- extractive QA inference ---------------------------------------
    # Truncate so over-long files don't exceed the model's context window.
    inputs = tokenizer(text, return_tensors="pt", truncation=True)
    with torch.no_grad():  # inference only — no gradients needed
        outputs = model(**inputs)

    # BUG FIX: the logits' argmax is a *token position*, not a token id.
    # Slice the answer span out of the input ids before decoding, instead
    # of decoding the position index itself.
    start = int(outputs.start_logits.argmax())
    end = int(outputs.end_logits.argmax()) + 1
    answers = tokenizer.decode(
        inputs["input_ids"][0][start:end], skip_special_tokens=True
    )

    # --- split decoded text into questions vs. options (basic heuristic) -
    questions = []
    options = []
    for answer in answers.split("."):
        answer = answer.strip()
        if not answer:
            continue  # skip empty fragments from trailing periods
        if answer.startswith("Q"):
            questions.append(answer)
        else:
            options.append(answer)

    # Join into strings so both return paths feed the Textbox outputs
    # consistently (the original returned lists here but strings on error).
    return "\n".join(questions), "\n".join(options)
# --- Gradio wiring: one uploaded file in, two text panes out -----------
question_box = gr.Textbox(label="Questions")
option_box = gr.Textbox(label="Options")

upload_widget = gr.File(label="Upload File")

iface = gr.Interface(
    title="Question and Option Generator",
    fn=generate_questions,
    inputs=upload_widget,
    outputs=[question_box, option_box],
)

# Start the web server (blocks until shut down).
iface.launch()