import calendar
import logging
import time
from datetime import datetime

import feedparser

from db.etcd import EtcdClient

class RSSParser:
    """Polls configured RSS feeds and stores new entries in etcd.

    Entries older than a startup/cutoff timestamp are skipped, and entries
    already seen (keyed by link) are not stored twice within one process
    lifetime.
    """

    def __init__(self, config, etcd_host, etcd_port, parse_only_new_items, cut_off_seconds):
        """Initialize the parser.

        Args:
            config: Object exposing `feeds` (each with `.rss` URL and `.name`),
                `log_level`, and `scrape_interval_seconds`.
            etcd_host: Hostname of the etcd server.
            etcd_port: Port of the etcd server.
            parse_only_new_items: If True, ignore everything published before
                process startup; otherwise accept items up to
                `cut_off_seconds` in the past.
            cut_off_seconds: Lookback window in seconds, used only when
                `parse_only_new_items` is False.
        """
        self.config = config
        self.etcd_client = EtcdClient(host=etcd_host, port=etcd_port)
        self.parse_only_new_items = parse_only_new_items
        now = int(time.time())
        self.startup_timestamp = now if self.parse_only_new_items else now - cut_off_seconds
        # Links of entries already stored this run.
        # NOTE(review): grows unboundedly while the process runs — consider a
        # bounded structure or TTL if feeds are large/long-lived.
        self.parsed_items = set()
        logging.basicConfig(level=self.get_log_level(config.log_level))
        self.logger = logging.getLogger(__name__)

    def get_log_level(self, level_str):
        """Map a level name like 'debug' to a logging constant, defaulting to INFO."""
        return getattr(logging, level_str.upper(), logging.INFO)

    def parse_and_store(self):
        """Fetch every configured feed and store entries newer than the cutoff in etcd."""
        for feed in self.config.feeds:
            feed_data = feedparser.parse(feed.rss)
            for entry in feed_data.entries:
                # Some feeds only provide an 'updated' date; skip entries with
                # no usable date at all instead of raising AttributeError.
                parsed_time = entry.get('published_parsed') or entry.get('updated_parsed')
                if parsed_time is None:
                    self.logger.debug("Skipping entry without a date: %s", entry.get('title', '<untitled>'))
                    continue

                # feedparser normalizes *_parsed structs to UTC, so convert
                # with timegm — time.mktime would wrongly treat it as local
                # time and skew every timestamp by the UTC offset.
                timestamp = calendar.timegm(parsed_time)

                # Skip news items created before the startup timestamp or cutoff timestamp
                if timestamp < self.startup_timestamp:
                    continue

                # Use the entry link as a unique identifier
                entry_id = entry.link
                if entry_id in self.parsed_items:
                    self.logger.info(f"Skipping duplicate entry: {entry.title}")
                    continue

                readable_timestamp = datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
                # A tag dict may lack a 'term' key; drop the resulting Nones so
                # the ', '.join below cannot raise TypeError.
                categories = [t.get('term') for t in entry.tags if t.get('term')] if 'tags' in entry else []
                if not categories:
                    categories = ['Uncategorized']
                content = entry.get('content', [{'value': ''}])[0]['value'].lower()  # Get content if available and convert to lowercase
                base_path = f"/feeds/{feed.name}/{timestamp}"

                self.etcd_client.put(f"{base_path}/title", entry.title)
                self.etcd_client.put(f"{base_path}/link", entry.link)
                self.etcd_client.put(f"{base_path}/category", ', '.join(categories))
                self.etcd_client.put(f"{base_path}/content", content)

                self.logger.info(f"Parsed news from {feed.name}: Title='{entry.title}', Timestamp='{readable_timestamp}', Categories='{', '.join(categories)}'")

                # Add the entry ID to the set of parsed items
                self.parsed_items.add(entry_id)

    def run(self):
        """Poll all feeds forever, sleeping `scrape_interval_seconds` between rounds."""
        while True:
            try:
                self.parse_and_store()
            except Exception:
                # A transient network/parse/etcd failure must not kill the
                # poll loop; log it and retry on the next round.
                self.logger.exception("Feed parsing round failed")
            time.sleep(self.config.scrape_interval_seconds)
