import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import feedparser
from datetime import datetime
import json

# Initialize the Llama 2 model and tokenizer.
# "meta-llama/Llama-2-7b-chat-hf" is a gated checkpoint; replace it with any
# causal LM you have access to (the rest of the script is model-agnostic).
model_name = "meta-llama/Llama-2-7b-chat-hf"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)


def fetch_news_from_rss(interests):
    """Fetch news from RSS feeds based on the user's interests."""
    # Example RSS feeds - you can expand this list
    rss_feeds = {
        "Technology": "https://feeds.feedburner.com/TechCrunch",
        "Business": "https://feeds.feedburner.com/BusinessInsider",
        "Sports": "https://www.espn.com/espn/rss/news",
    }

    articles = []
    for interest in interests:
        if interest in rss_feeds:
            feed = feedparser.parse(rss_feeds[interest])
            articles.extend(feed.entries[:3])  # Top 3 articles per interest
    return articles


def generate_summary(text, language="English"):
    """Generate a summary using Llama 2."""
    prompt = f"""Please provide a concise summary of the following news article in {language}.
Focus on the main points and key details:

Article: {text}

Summary:"""

    inputs = tokenizer(prompt, return_tensors="pt", max_length=512, truncation=True)
    outputs = model.generate(
        **inputs,                # pass the attention_mask along with input_ids
        max_new_tokens=150,      # cap the generated tokens, not the total length
        min_new_tokens=50,
        do_sample=True,          # required for temperature to take effect
        temperature=0.7,
        num_return_sequences=1,
    )
    summary = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # The decoded text includes the prompt, so keep only what follows "Summary:".
    return summary.split("Summary:")[-1].strip()


def save_user_preferences(name, language, interests):
    """Save user preferences to a JSON file."""
    if not name or not language or not interests:
        return "Please fill in all fields!"

    preferences = {
        "name": name,
        "language": language,
        "interests": interests,
        "last_updated": datetime.now().isoformat(),
    }
    with open(f"preferences_{name}.json", "w") as f:
        json.dump(preferences, f)
    return f"Preferences saved for {name}!"


def get_personalized_summary(name):
    """Get a personalized news summary based on saved user preferences."""
    try:
        with open(f"preferences_{name}.json", "r") as f:
            preferences = json.load(f)
    except FileNotFoundError:
        return "Please set your preferences first!"
    # Fetch news based on the stored interests
    articles = fetch_news_from_rss(preferences["interests"])

    summaries = []
    for article in articles:
        title = article.get("title", "")
        content = article.get("description", "")

        # Generate summary using Llama 2
        summary = generate_summary(
            content,
            language=preferences["language"],
        )
        summaries.append(f"📰 {title}\n\n{summary}\n\n---")

    if not summaries:
        return "No articles found for your selected interests."
    return "\n".join(summaries)


# Create the Gradio interface
with gr.Blocks(title="Llama 2 News Summarizer") as demo:
    gr.Markdown("# 📰 AI News Summarizer powered by Llama 2")

    with gr.Tab("Set Preferences"):
        name_input = gr.Textbox(label="Your Name")
        language_dropdown = gr.Dropdown(
            choices=["English", "Spanish", "French", "Arabic"],
            label="Preferred Language",
        )
        interests_checkboxes = gr.CheckboxGroup(
            choices=["Technology", "Business", "Sports", "Science", "Politics"],
            label="News Interests",
        )
        save_button = gr.Button("Save Preferences")
        preferences_output = gr.Textbox(label="Status")

        save_button.click(
            save_user_preferences,
            inputs=[name_input, language_dropdown, interests_checkboxes],
            outputs=[preferences_output],
        )

    with gr.Tab("Get News Summary"):
        name_check = gr.Textbox(label="Enter your name to get summary")
        get_summary_button = gr.Button("Get Summary")
        summary_output = gr.Textbox(
            label="Your Personalized News Summary",
            lines=10,
        )

        get_summary_button.click(
            get_personalized_summary,
            inputs=[name_check],
            outputs=[summary_output],
        )

if __name__ == "__main__":
    demo.launch()