import requests
from bs4 import BeautifulSoup
from openai import OpenAI
import gradio as gr
import os
from dotenv import load_dotenv

load_dotenv()

# Client for the OpenAI Chat Completions API; the key is read from the .env file.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

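# Expected .env entries (names mirror the os.getenv() calls in this file; the
# values below are placeholders, not real secrets or the actual prompt text):
#   OPENAI_API_KEY=<your OpenAI API key>
#   FLASHCARD_PROMPT=<system prompt describing how the flashcards should be written>
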
def scrape_content(url):
    # Fetch the page and fail fast on HTTP errors instead of parsing an error page.
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')

    # Pages without a <title> tag would otherwise raise an AttributeError.
    title_tag = soup.find('title')
    title = title_tag.get_text(strip=True) if title_tag else "Untitled"
    paragraphs = soup.find_all('p')
    content = '\n'.join(para.get_text() for para in paragraphs)

    return title, content

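# Quick sanity check for the scraper (hypothetical URL, shown for illustration only):
#   title, content = scrape_content("https://example.com/news/some-article")
#   print(title, content[:200])
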
def create_flashcards(content):
    # The system prompt that describes the flashcard format is kept out of the
    # source and read from the FLASHCARD_PROMPT environment variable.
    prompt = os.getenv("FLASHCARD_PROMPT")
    if not prompt:
        raise ValueError("FLASHCARD_PROMPT is not set in the environment.")

    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {"role": "system", "content": prompt},
            {"role": "user", "content": content}
        ],
        max_tokens=500,
        temperature=0.7
    )

    flashcards = response.choices[0].message.content.strip()
    return flashcards

def process_url(url):
    if not url:
        # Return a pair so the two Gradio outputs (text + file) always unpack cleanly.
        return "No URL provided.", None

    title, content = scrape_content(url)
    flashcards = create_flashcards(content)

    # Save the flashcards so the interface can offer them as a downloadable file.
    file_path = "flashcards.txt"
    with open(file_path, "w", encoding="utf-8") as file:
        file.write(f"Title: {title}\n\nFlashcards:\n{flashcards}")

    return flashcards, file_path

def interface_fn(url):
    flashcards, file_path = process_url(url)
    return flashcards, file_path


iface = gr.Interface(
    fn=interface_fn,
    inputs=gr.Textbox(lines=2, placeholder="Enter URL here..."),
    outputs=["text", gr.File(label="Download Flashcards")],
    title="The Federal Flash Card Generator",
    description=(
        "An application that helps students prepare for competitive exams such as "
        "the UPSC, SSC, CLAT, and MBA entrance tests. Enter a news site URL to "
        "generate flashcards for exam preparation; the result can also be "
        "downloaded as a text file."
    )
)

if __name__ == "__main__":
    iface.launch(share=False)
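
# To run this locally (assuming the file is saved as app.py and requests,
# beautifulsoup4, openai, gradio, and python-dotenv are installed):
#   python app.py
# Gradio prints a local URL in the console; open it in a browser, paste a news
# article URL, and the generated flashcards appear alongside a download link.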