import json
import datetime
from package.connector.kafka_db import kafka_consumer, kafka_producer
from package.connector.sql_db import db
from package.connector.redis_db import rdb
from setting import setting 
from package.fastapi.job import scheduler_job
from common.cache import notify_cache
from common.notify import NotifyMap


class SystemTask:
    """Base class for system tasks.

    Subclasses implement either scheduled (cron) tasks or Kafka-driven tasks.
    """

    @classmethod
    def keepalive(cls):
        """Heartbeat job: print a timestamped liveness message.

        Uses a timezone-aware UTC timestamp — ``datetime.utcnow()`` is
        deprecated since Python 3.12 and returned a naive datetime.
        """
        print(f'{datetime.datetime.now(datetime.timezone.utc)}: scheduler job is alive')


class CronTask(SystemTask):
    """Scheduled (cron-style) system tasks."""

    @classmethod
    def add_job(cls):
        """Register the keepalive heartbeat with the scheduler (runs every 10 seconds)."""
        job_spec = {
            'job_id': 'collector:cron:keepalive',
            'func': cls.keepalive,
            'cron': {'interval': 10, 'unit': 'seconds'},
        }
        scheduler_job.add_job(**job_spec)


class KafkaTask(SystemTask):
    """Kafka-driven system tasks.

    Consumes messages from the notify topic and dispatches each one to the
    ``execute_<topic>`` handler matching the message's topic name.
    """

    NotifyTopic = 'system_notify_test'

    @classmethod
    def execute_system_notify_test(cls, task):
        """Run one notification task taken off the notify topic."""
        configure, payload = task['notify_configure'], task['data']

        cached = notify_cache.get(configure['id'])
        if not cached:  # no notification configuration cached for this id
            raise Exception('无通告配置')

        notifier = NotifyMap[cached['type']](**cached['configure'])
        notifier.notify(**payload)

    @classmethod
    def consume(cls):
        """Poll the notify topic forever, routing each valid message by topic name.

        Blocks indefinitely; messages carrying a consumer error are skipped.
        """
        kafka_consumer.consumer.subscribe([cls.NotifyTopic])

        while True:
            batch = kafka_consumer.consume(num_messages=10, timeout=5)
            if not batch:
                continue

            for message in batch:
                if message.error():
                    continue

                handler = getattr(cls, f'execute_{message.topic()}')
                handler(json.loads(message.value()))


if __name__ == '__main__':
    # Wire up all backing services, register the cron heartbeat, then block
    # forever in the Kafka consume loop.
    db.init(url=setting.pg_uri, pool_pre_ping=True)  # PostgreSQL (pre-ping keeps pooled connections fresh)
    rdb.init(host=setting.redis_host, password=setting.redis_password)  # Redis client
    kafka_producer.init(**{'bootstrap.servers': setting.kafka_servers, **setting.kafka_options})
    kafka_consumer.init(**{'bootstrap.servers': setting.kafka_servers, **setting.kafka_options, 'group.id': 'system_task'})
    # Scheduler job store is Redis-backed; coalesce missed runs, at most one
    # concurrent instance per job, and run the scheduler in the background so
    # KafkaTask.consume() can own the main thread below.
    scheduler_job.init_config(store_configure=dict(host=setting.redis_host, password=setting.redis_password,
                                          jobs_key='collector2:apscheduler',
                                          run_times_key='collector2:apscheduler.runtime'),
                    default_configure={'coalesce': True, 'max_instances': 1}, is_background=True)
    
    CronTask.add_job()
    scheduler_job.scheduler.start()  # background scheduler thread
    KafkaTask.consume()  # blocking consume loop — never returns
