# News Article Summarizer — scrapes a news page, summarizes it with OpenAI, and serves the result through a Gradio UI.
# Standard library
import os

# Third-party
import gradio as gr
import openai
import requests
from bs4 import BeautifulSoup
from dotenv import load_dotenv

# Load environment variables from .env so the API key never lives in source.
load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")
def scrape_content(url):
    """Download a web page and extract its title and paragraph text.

    Args:
        url: Address of the page to fetch.

    Returns:
        A ``(title, content)`` tuple: the ``<title>`` text (or a placeholder
        when the page has none) and the text of all ``<p>`` elements joined
        with newlines.

    Raises:
        requests.RequestException: on network failure or a non-2xx response.
    """
    # Timeout keeps the UI from hanging forever on an unresponsive host;
    # raise_for_status turns HTTP errors into exceptions instead of parsing
    # an error page as if it were an article.
    response = requests.get(url, timeout=15)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')
    # Extraction is generic (title + <p> tags) — adjust per target site layout.
    # Not every page has a <title>; guard against AttributeError on None.
    title_tag = soup.find('title')
    title = title_tag.get_text() if title_tag else "(no title)"
    paragraphs = soup.find_all('p')
    content = '\n'.join(para.get_text() for para in paragraphs)
    return title, content
def summarize_content(content):
    """Summarize article text in about 100 words using the OpenAI API.

    Args:
        content: Plain text of the article to summarize.

    Returns:
        The model's summary with surrounding whitespace stripped.
    """
    prompt = f"Summarize the following news article in about 100 words:\n\n{content}\n\n"
    response = openai.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            # Keep the system prompt's word target consistent with the user
            # prompt and the UI description (all promise ~100 words).
            {"role": "system", "content": "You are a helpful assistant that summarizes news articles in about 100 words."},
            {"role": "user", "content": prompt}
        ],
        # 300 tokens comfortably fits a ~100-word summary.
        max_tokens=300,
        # Low temperature for factual, reproducible summaries.
        temperature=0.2
    )
    summary = response.choices[0].message.content.strip()
    return summary
def process_url(url):
    """Scrape a single news URL and return its title plus an AI summary.

    Args:
        url: User-entered URL (may contain stray whitespace or be empty).

    Returns:
        A display string for the Gradio text output: either the formatted
        title + summary, or a human-readable error message. Errors are
        returned rather than raised so the UI shows them instead of crashing.
    """
    if not url or not url.strip():
        return "No URL provided."
    try:
        title, content = scrape_content(url.strip())
    except requests.RequestException as exc:
        # Surface fetch failures (bad URL, timeout, HTTP error) in the UI.
        return f"Failed to fetch URL: {exc}"
    if not content:
        # Nothing to summarize — avoid sending an empty prompt to the API.
        return f"Title: {title}\n\nNo article text found to summarize."
    summary = summarize_content(content)
    return f"Title: {title}\n\nSummary:\n{summary}"
# Gradio interface: one URL textbox in, plain text (title + summary) out.
iface = gr.Interface(
    fn=process_url,
    inputs=gr.Textbox(lines=2, placeholder="Enter URL here..."),
    outputs="text",
    title="News Article Summarizer",
    description="Enter a News Site URL to generate a 100-word summary."
)

# Launch only when run as a script; share=True exposes a public Gradio link.
if __name__ == "__main__":
    iface.launch(share=True)