import logging
import json
import requests
from celery import Celery
from requests.exceptions import ConnectionError
from pipeline.utils import load_config, analyze_sentiment, get_feeds, etcd_client, redis_client
from parsers.adaptive_feed_parser import adaptive_feed_parser
from sumy.parsers.plaintext import PlaintextParser
from sumy.nlp.tokenizers import Tokenizer
from sumy.summarizers.lsa import LsaSummarizer

# Load configurations: the app config object plus etcd/redis connection settings.
# NOTE(review): load_config is a project helper — the exact tuple contract is
# defined in pipeline.utils; confirm ordering there if it changes.
fnsa_inference_config, etcd_host, etcd_port, redis_host, redis_port, redis_db = load_config()

# Initialize logger at the level requested by the config (e.g. "INFO", "DEBUG").
logging.basicConfig(level=fnsa_inference_config.log_level)
logger = logging.getLogger(__name__)

# Initialize Celery, using Redis as the message broker.
app = Celery('fnsa_inference_manager', broker=f'redis://{redis_host}:{redis_port}/{redis_db}')

# Apply Celery worker configuration from the loaded config:
# - worker_hijack_root_logger: keep/replace the root logger set up above
# - broker_connection_retry_on_startup: retry broker connection when the worker boots
# - worker_concurrency: number of concurrent worker processes/threads
app.conf.update(
    worker_hijack_root_logger=fnsa_inference_config.celery.worker_hijack_root_logger,
    broker_connection_retry_on_startup=fnsa_inference_config.celery.broker_connection_retry_on_startup,
    worker_concurrency=fnsa_inference_config.celery.worker_concurrency
)

def summarize_text(text, sentences_count=5, language="english"):
    """Summarize plain text with Sumy's LSA summarizer.

    Args:
        text: Plain-text document to summarize.
        sentences_count: Maximum number of sentences to keep in the summary.
        language: Tokenizer language for Sumy (default "english"); added as a
            backward-compatible parameter so non-English feeds can be handled.

    Returns:
        The selected summary sentences joined into one string by single spaces.
    """
    parser = PlaintextParser.from_string(text, Tokenizer(language))
    summarizer = LsaSummarizer()
    summary = summarizer(parser.document, sentences_count)
    return " ".join(str(sentence) for sentence in summary)

def _kv_get(client_type, key):
    """Read *key* from the backing store selected by *client_type* ('etcd' or 'redis').

    Returns whatever the underlying client returns (truthy when the key exists),
    or None for an unknown client_type.
    """
    if client_type == 'etcd':
        return etcd_client.get(key)
    if client_type == 'redis':
        return redis_client.get(key)
    return None


def _kv_put(client_type, key, value):
    """Write *key* -> *value* to the backing store selected by *client_type*.

    Unknown client_type values are silently ignored, matching the original
    branch-per-client behavior.
    """
    if client_type == 'etcd':
        etcd_client.put(key, value)
    elif client_type == 'redis':
        redis_client.put(key, value)


@app.task
def process_feeds(client_type='etcd'):
    """Fetch raw feed entries, run sentiment analysis, and persist the results.

    Args:
        client_type: Which key-value backend to read/write ('etcd' or 'redis').

    Side effects:
        - Writes processed labels, summaries, alias flags, and per-stock news
          pointers back to the selected store.
        - POSTs a notification to the configured Telegram bot endpoint for each
          matched stock.
    """
    logger.info("Starting feed processing...")

    # Fetch feeds from ETCD or Redis
    feeds = get_feeds(client_type)
    feed_items = {}

    # Group flat keys of the form /feeds/<site>/<timestamp>/<data_type> into
    # one dict per (site, timestamp) item. Malformed keys are skipped instead
    # of raising IndexError.
    for key, value in feeds.items():
        parts = key.split('/')
        if len(parts) < 5:
            logger.warning("Skipping malformed feed key: %s", key)
            continue
        site, timestamp, data_type = parts[2], parts[3], parts[4]
        item_key = f"{site}/{timestamp}"
        feed_items.setdefault(item_key, {})[data_type] = value

    for item_key, item in feed_items.items():
        processed_label_key = f'/feeds/{item_key}/processed/label'

        # Skip feeds that already carry a processed label.
        if _kv_get(client_type, processed_label_key):
            logger.info(f"Feed {item_key} already processed. Skipping...")
            continue

        content = item.get('content')
        link = item.get('link')
        timestamp = item_key.split('/')[-1]
        if content and link:
            # Use the adaptive feed parser to extract core content
            core_content = adaptive_feed_parser(content).lower()

            # Collect every configured stock whose aliases appear in the text.
            found_stocks = []
            for stock in fnsa_inference_config.stocks:
                lower_aliases = [alias.lower() for alias in stock.feeds_aliases]
                if any(alias in core_content for alias in lower_aliases):
                    found_stocks.append(stock.name.lower())
            # aliases_existing is fully determined by found_stocks.
            aliases_existing = bool(found_stocks)

            # Persist alias-match metadata regardless of outcome.
            _kv_put(client_type, f'/feeds/{item_key}/processed/aliases_existing', json.dumps(aliases_existing))
            _kv_put(client_type, f'/feeds/{item_key}/processed/stocks', json.dumps(found_stocks))

            if aliases_existing:
                # Summarize the core content using Sumy
                summary = summarize_text(core_content)

                try:
                    response = analyze_sentiment(summary)
                    _kv_put(client_type, processed_label_key, response['outputs'][0]['data'][0]['label'])
                    _kv_put(client_type, f'/feeds/{item_key}/processed/summary', summary)

                    for stock in found_stocks:
                        news_path = f'/stocks/{stock}/news/{timestamp}'
                        feed_pointer = f'/feeds/{item_key}'
                        _kv_put(client_type, news_path, feed_pointer)
                        _kv_put(client_type, f'/stocks/{stock}/', "")
                        _kv_put(client_type, f'/stocks/{stock}/news/latest', news_path)

                        # Ping the Telegram bot REST endpoint for each stock.
                        # Uses a distinct name so the sentiment `response` above
                        # is not shadowed.
                        try:
                            telegram_bot_url = f"{fnsa_inference_config.telegram_bot_uri}?stock_name={stock}"
                            tg_response = requests.post(telegram_bot_url, json={"message": f"Feed processing completed for {stock}."})
                            if tg_response.status_code == 200:
                                logger.info(f"Successfully pinged Telegram bot endpoint for {stock}.")
                            else:
                                logger.error(f"Failed to ping Telegram bot endpoint for {stock}. Status code: {tg_response.status_code}")
                        except Exception as e:
                            logger.error(f"Error pinging Telegram bot endpoint for {stock}: {e}")
                except (ValueError, ConnectionError) as e:
                    logger.error(f"Error processing feed {item_key}: {e}")
# Periodic task schedule for Celery beat: run process_feeds at the configured
# interval (seconds). NOTE(review): the task path 'pipeline.tasks.process_feeds'
# assumes this module is importable as pipeline.tasks — confirm against the
# package layout, otherwise beat will schedule an unregistered task name.
app.conf.beat_schedule = {
    'process-feeds': {
        'task': 'pipeline.tasks.process_feeds',
        'schedule': fnsa_inference_config.jobs_interval_seconds,
        'args': ('etcd',)  # or ('redis',) to use Redis
    },
}
# All beat schedule times are interpreted in UTC.
app.conf.timezone = 'UTC'
