# --- Scraped page residue (Hugging Face Space listing chrome), not program code ---
# Spaces: Sleeping
# File size: 1,838 Bytes
# Commits: f0615b8 a0df58a f0615b8 aecbd98 f0615b8 a0df58a f0615b8 bcbff1b f0615b8
import requests
from bs4 import BeautifulSoup
import openai
import gradio as gr
import os
from dotenv import load_dotenv
# Load environment variables from .env file so OPENAI_API_KEY can be kept
# out of source control; getenv returns None if the key is missing, in which
# case the first OpenAI call will fail with an authentication error.
load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")
# Function to scrape content from a URL
def scrape_content(url):
    """Fetch a web page and extract its title and paragraph text.

    Args:
        url: Absolute URL of the article to scrape.

    Returns:
        A ``(title, content)`` tuple: the page's <title> text (or
        ``"Untitled"`` when the page has no title tag) and the text of
        all <p> elements joined by newlines.

    Raises:
        requests.RequestException: on network failure, timeout, or an
            HTTP error status.
    """
    # Timeout prevents the Gradio worker from hanging forever on a dead host.
    response = requests.get(url, timeout=15)
    # Surface 4xx/5xx as an exception instead of scraping an error page.
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')
    # find() returns None when the tag is absent — guard before get_text().
    title_tag = soup.find('title')
    title = title_tag.get_text() if title_tag else "Untitled"
    paragraphs = soup.find_all('p')
    content = '\n'.join(para.get_text() for para in paragraphs)
    return title, content
# Function to summarize content using OpenAI
def summarize_content(content):
    """Summarize article text using the OpenAI chat completions API.

    Args:
        content: Plain article text to summarize.

    Returns:
        The model's summary with surrounding whitespace stripped.

    Raises:
        openai.OpenAIError: on API/auth/network failure.
    """
    prompt = f"Summarize the following news article in about 100 words:\n\n{content}\n\n"
    response = openai.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            # Keep the system instruction consistent with the user prompt and
            # the UI description: both promise ~100 words (was "60 words").
            {"role": "system", "content": "You are a helpful assistant that summarizes news articles in about 100 words."},
            {"role": "user", "content": prompt}
        ],
        max_tokens=300,  # ~100 words fits comfortably within 300 tokens
        temperature=0.2  # low temperature for factual, repeatable summaries
    )
    summary = response.choices[0].message.content.strip()
    return summary
# Function to process a single URL and generate a summary
def process_url(url):
    """Scrape a URL and return a formatted title + summary for the UI.

    Args:
        url: URL string entered by the user (may be empty or padded
            with whitespace).

    Returns:
        A display string: either the title and summary, or a
        human-readable error message — never a raw traceback, since
        the return value is shown directly in the Gradio textbox.
    """
    # Reject empty and whitespace-only input before doing any network work.
    if not url or not url.strip():
        return "No URL provided."
    try:
        title, content = scrape_content(url.strip())
    except requests.RequestException as exc:
        # Network/HTTP failures become a friendly message instead of a traceback.
        return f"Could not fetch the URL: {exc}"
    # Skip the (billable) API call when no article text was found.
    if not content:
        return f"Title: {title}\n\nNo article text found at this URL."
    summary = summarize_content(content)
    return f"Title: {title}\n\nSummary:\n{summary}"
# Gradio interface
# Gradio UI: a single textbox in, plain text out, wired to process_url.
iface = gr.Interface(
    fn=process_url,
    inputs=gr.Textbox(lines=2, placeholder="Enter URL here..."),
    outputs="text",
    title="News Article Summarizer",
    description="Enter a News Site URL to generate a 100-word summary."
)
# Launch the interface only when run as a script (not on import).
# share=True creates a temporary public gradio.live URL for the app.
if __name__ == "__main__":
    iface.launch(share=True)
|