from package.connector.sql_db import db
from package.connector.redis_db import rdb
from package.connector.elastic_db import es_db
from package.connector.kafka_db import kafka_producer
from package.task import task
from setting import setting

# --- One-time wiring of external connectors (runs at import time) ---
db.init(url=setting.pg_uri, pool_pre_ping=True)
rdb.init(host=setting.redis_host, password=setting.redis_password)
kafka_producer.init(**{'bootstrap.servers': setting.kafka_servers})
es_db.init(hosts=f'http://{setting.elasticsearch_host}:{setting.elasticsearch_port}')

# Scheduler state is persisted in Redis under the 'collector2:' key namespace;
# is_background=False means the scheduler will block the process when started.
_store_conf = {
    'host': setting.redis_host,
    'password': setting.redis_password,
    'jobs_key': 'collector2:apscheduler',
    'run_times_key': 'collector2:apscheduler.runtime',
}
task.init_config(store_configure=_store_conf,
                 default_configure={'coalesce': True, 'max_instances': 3},
                 is_background=False)


def load_cron() -> None:
    """Load scheduled jobs stored in the database.

    Opens a short-lived DB session, iterates every devops item and registers
    its heartbeat and metric jobs with the scheduler.

    The session is closed in a ``finally`` block so it is released even if
    job registration raises (the original leaked the session on error).
    """
    # Imported here (not at module top) — presumably to avoid a circular
    # import between this bootstrap module and the router package.
    from router.devops.view import devops
    session = db.SessionLocal()
    try:
        for item in devops.get_items(session).all():
            devops.add_heartbeat_job(item)
            devops.add_metric_job(item)
    finally:
        session.close()


def _main() -> None:
    """Register DB-defined cron jobs, then run the scheduler in the foreground."""
    load_cron()
    # Blocks here: the scheduler was configured with is_background=False.
    task.scheduler.start()


if __name__ == '__main__':
    _main()
