'''
Collect data per second and release it to Kafka.

An extra table is used for time-drift storage.
'''
import atexit
from importlib import import_module
import logging
from logging.handlers import TimedRotatingFileHandler
import os
import time

from kafka import KafkaProducer
from prometheus_client import start_http_server
import pymysql

from utils.release_helper import create_sql_template
from utils.release_helper import release_factory
from utils.time_drift import get_drift
from utils.timestamp import Timestamp
from utils.timestamp import get_now
from utils.timestamp import get_date
from utils.timestamp import get_timestamp


def release(release_helpers, last, drift):
    '''Push one batch of data through every helper and advance the watermark.

    :param release_helpers: helpers produced by ``release_factory``
    :param last: watermark object tracking the previous batch end
    :param drift: time drift (ms) to subtract from the query window
    :returns: the refreshed ``last`` watermark
    '''
    # Build the drift-corrected time window for this batch.
    now = get_now()
    timestamp = get_timestamp(now)
    window_start = last.timestamp - drift
    window_end = timestamp - drift
    date = get_date(now)

    # Hand the same window to each helper.
    for helper in release_helpers:
        task = helper.task
        template = create_sql_template(window_start, window_end,
                                       task.colnames, task.msg_column)
        helper.release(drift, date, template)

    # Remember where this batch ended so the next one resumes from here.
    last.refresh(timestamp)
    return last


def main(last, tasks, producer, conn, storage):
    '''
    Release data in stream.

    Loops forever: refresh the DB connection, read the current time drift,
    release one batch per task, then pad the cycle to roughly one second.
    Exits only when the drift cannot be fetched.
    '''
    logger = logging.getLogger('main')

    helpers = [release_factory(task, producer, conn, storage)
               for task in tasks]

    previous_drift = 0
    while True:
        # Keep the MySQL connection alive across long-running loops.
        conn.ping(reconnect=True)

        drift = get_drift(conn)
        if drift is None:
            logger.error('Failed to fetch time drift!')
            break
        if drift != previous_drift:
            previous_drift = drift
            logger.info('drift is changed to %d', drift)

        last = release(helpers, last, drift)
        producer.flush()

        # Pad an idle cycle so the loop ticks about once per second.
        elapsed = (get_timestamp(get_now()) - last.timestamp) / 1000
        if elapsed < 0.5:
            time.sleep(1 - elapsed)


def before_exit(producer):
    '''Flush any buffered messages before the process terminates.

    Registered via ``atexit`` so it runs on normal exit as well as on
    unhandled exceptions.
    '''
    producer.flush()


if __name__ == '__main__':
    CONFIG = import_module('config')

    # Logging: daily-rotated file under ./logs.
    os.makedirs('logs', exist_ok=True)
    LOG_HANDLER = TimedRotatingFileHandler(os.path.join('logs', 'log'),
                                           when="midnight", interval=1)
    logging.basicConfig(level=logging.INFO, format=CONFIG.LOG_FORMAT,
                        datefmt='%Y-%m-%dT%H:%M:%S', handlers=[LOG_HANDLER, ])

    # Kafka producer; ensure buffered messages are flushed on exit.
    PRODUCER = KafkaProducer(**CONFIG.KAFKA_CONFIG)
    atexit.register(before_exit, producer=PRODUCER)

    # Prometheus metrics endpoint.
    start_http_server(CONFIG.MONITOR_PORT)

    main(last=Timestamp(),
         tasks=import_module('task').TASKS,
         producer=PRODUCER,
         conn=pymysql.connect(**CONFIG.DB_CONFIG),
         storage=CONFIG.STORAGE_ROOT)
