import json
from package.connector.kafka_db import kafka_consumer
from package.connector.sql_db import db
from package.connector.redis_db import rdb
from model.model import AUTODetect
from setting import setting
from common.cache import auto_detect_cache
from package.connector.sql_db import db
from package.connector.redis_db import rdb
from package.connector.elastic_db import es_db
from package.connector.kafka_db import kafka_producer
from package.fastapi.job import scheduler_job
from setting import setting


class CronTask:
    """Scheduled (cron) maintenance tasks, intended for registration with the scheduler."""

    @classmethod
    def cron_index_ilm(cls):
        """Index lifecycle (ILM) check for the 'dsm-*' Elasticsearch indices.

        Fetches the alias listing for all 'dsm-*' indices.

        NOTE(review): the response of the aliases call is currently discarded
        and a placeholder list is returned — this looks like an unfinished
        implementation; confirm the intended return value before relying on it.

        Returns:
            list[dict]: currently a static placeholder ``[{}]``.
        """
        # Side effect only for now: fetch alias info; the original bound it to
        # an unused local (`resp`), which has been removed.
        es_db.client.cat.aliases(index='dsm-*', format='json')
        return [{}]


class KafkaTask:
    """Consume discovery messages from Kafka and register new data sources."""

    # Kafka topic carrying raw receiver/discovery data.
    Topic = 'receiver_data'

    @classmethod
    def consume(cls):
        """Consume messages forever; collect all discovered data sources
        and register any not seen before.

        Each poll batch is deduplicated into a set of
        ``(category, host, port)`` keys before being handed to
        :meth:`add_detect`.
        """
        kafka_consumer.consumer.subscribe([cls.Topic])
        while True:
            messages = kafka_consumer.consume(num_messages=5000, timeout=5)
            if not messages:
                continue

            set_source = set()
            for item in messages:
                # Skip messages that carry a transport/broker-level error.
                if item.error():
                    continue

                message = json.loads(item.value())
                # BUGFIX: the original built a tuple of one-element *set*
                # literals ({...}, {...}, {...}); sets are unhashable, so
                # set_source.add() would raise TypeError, and add_detect()
                # would unpack sets instead of scalar values. Use a plain
                # tuple of the values themselves.
                source_key = (message["category"], message["host"], message["port"])
                set_source.add(source_key)

            if not set_source:
                continue

            cls.add_detect(set_source)

    @classmethod
    def add_detect(cls, detect):
        """Persist newly discovered data sources.

        Args:
            detect: set of ``(category, host, port)`` tuples from the batch.

        Sources already present in the cache are skipped; new ones are
        inserted into the database in one transaction and then pushed into
        the cache.
        """
        # NOTE(review): assumes auto_detect_cache.get() returns a set of
        # (category, host, port) tuples comparable with `detect` — confirm
        # against the cache implementation.
        detect_keys = auto_detect_cache.get()
        new_detect_source = detect - detect_keys
        if not new_detect_source:
            return

        values = []
        with db.SessionLocal() as s:
            for category, host, port in new_detect_source:
                s.add(AUTODetect(category=category, host=host, port=port))
                values.append(json.dumps((category, host, port), ensure_ascii=False))
            s.commit()
        auto_detect_cache.set(*values)


if __name__ == '__main__':
    # Initialize all connectors exactly once.
    # BUGFIX: the original called db.init() and rdb.init() twice with the
    # same arguments (redundant re-initialization); the duplicates are removed.
    db.init(url=setting.pg_uri, pool_pre_ping=True)
    rdb.init(host=setting.redis_host, password=setting.redis_password)
    es_db.init(hosts=setting.elasticsearch_hosts, http_auth=setting.elasticsearch_auth)
    kafka_producer.init(**{'bootstrap.servers': setting.kafka_servers, **setting.kafka_options})
    kafka_consumer.init(**{'bootstrap.servers': setting.kafka_servers,
                           **setting.kafka_options,
                           'group.id': 'collector_auto_detect'})

    scheduler_job.init_config(
        store_configure=dict(host=setting.redis_host, password=setting.redis_password,
                             jobs_key='collector2:apscheduler',
                             run_times_key='collector2:apscheduler.runtime'),
        default_configure={'coalesce': True, 'max_instances': 1},
        is_background=False)

    # NOTE(review): with is_background=False, scheduler.start() may block
    # this thread, in which case KafkaTask.consume() below would never run —
    # confirm the scheduler runs in the background (or move consume() into
    # a scheduled job) before deploying.
    scheduler_job.scheduler.start()
    KafkaTask.consume()
