#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from elasticsearch import Elasticsearch
import click
import logging
from datetime import timedelta, date


# Emit INFO-level progress messages on the root logger.
logging.getLogger().setLevel(logging.INFO)
# Maps group name -> look-back window in days; populated by readfile().
queries = {}
# Value stored under 'errortype' in errordb records (see insert_vacancy).
ERROR_TYPE = 3


@click.command()
@click.option('--host', help='elasticsearch host address', default='http://127.0.0.1:9200/', required=True)
@click.option('--config', help='configuration file of groups', required=True)
def main(host, config):
    """Load the group configuration, then scan all projects for vacancies."""
    readfile(config)
    check_project(host)


def count_by_term(es, project, start_date, yesterday):
    """Count result docs for *project* whose result.date lies in [start_date, yesterday]."""
    project_filter = {'term': {'project': project}}
    date_filter = {'range': {'result.date': {'gte': start_date, 'lte': yesterday}}}
    query = {'query': {'bool': {'must': [project_filter, date_filter]}}}
    response = es.count(index="resultdb", doc_type="result", body=query)
    return response.get('count', 0)


def select_all_in_projectdb(es, fields=None, offset=0, limit=0):
    """Yield the _source of each project document, one page at a time.

    *offset*/*limit* select the page; *fields* optionally restricts the
    returned source fields.
    """
    response = es.search(index="projectdb", doc_type="project",
                         body={}, _source_include=fields or [],
                         from_=offset, size=limit)
    hits = response.get('hits', {}).get('hits', [])
    for hit in hits:
        yield hit['_source']


def insert_vacancy(es, error_type, project, date, group, vacancy_dates):
    """Index one error record into errordb, keyed by "<project>:<date>".

    NOTE(review): the 'date' parameter shadows datetime.date; the name is
    kept so keyword callers remain compatible.
    """
    doc_id = '%s:%s' % (project, date)
    record = {
        'errortype': error_type,
        'project': project,
        'date': date,
        'group': group,
        'vacancy_dates': vacancy_dates,
    }
    return es.index(index="errordb", doc_type="error", body=record, id=doc_id)


def readfile(config):
    """Load "group:days" pairs from *config* into the global ``queries`` map.

    Each non-empty line is split on ':'; the first field is the group name
    and the second is its look-back window (in days, as a string).
    Blank or malformed lines are skipped with a warning.
    """
    global queries
    # open() with a context manager replaces the Python-2-only file() so the
    # handle is closed even on error; decoding happens once at the I/O
    # boundary instead of via the py2-only str.decode on each line.
    with open(config, encoding='utf-8') as f:
        for line in f:
            line = line.strip('\n')
            if not line:
                # The original crashed (IndexError) on blank lines.
                continue
            fields = line.split(':')
            if len(fields) < 2:
                logging.warning("skipping malformed config line: %s", line)
                continue
            queries[fields[0]] = fields[1]


def check_project(host):
    """Scan every project in projectdb and record vacancies into errordb.

    For each project, the group's configured look-back window (days, from
    the global ``queries`` map) defines the range [today-window, yesterday].
    If a project has no results in that range, the code probes backwards one
    day at a time (up to 50 days past the window) to measure how far back
    the most recent data lies, then writes an error record via
    insert_vacancy().
    """
    offset = 0
    limit = 20  # page size for projectdb scans
    # A single client serves all three indices; the original created three
    # identical Elasticsearch instances for the same host.
    es = Elasticsearch(hosts=host)
    while True:
        projects = list(select_all_in_projectdb(es, offset=offset, limit=limit))
        nmatched = len(projects)
        if nmatched == 0:
            break
        offset += nmatched
        for project in projects:
            if not project:
                continue
            vacancy_dates = 0
            project_name = project.get('name', None)
            project_group = project.get('group', None)
            # Groups absent from the config map get a 0 window and are skipped.
            time_delta = int(queries.get(project_group, 0))
            if not time_delta:
                logging.info("group %s has not configured in config json file", project_group)
                continue
            start_date = date.today() - timedelta(days=time_delta)
            yesterday = date.today() - timedelta(days=1)
            count = count_by_term(es, project_name, start_date, yesterday)
            logging.info("processing project %s ", project_name)
            # Probe backwards one extra day per iteration until data is found
            # or time_delta exceeds 50; vacancy_dates records the last window
            # probed (i.e. how many days back the newest data lies).
            count_continue = count
            while not count_continue:
                time_delta += 1
                start_date_continue = date.today() - timedelta(days=time_delta)
                count_continue = count_by_term(es, project_name, start_date_continue, yesterday)
                vacancy_dates = time_delta
                if time_delta > 50:
                    logging.info("project %s has no data in past %d days", project_name, time_delta)
                    break

            if not count:
                logging.info("project %s has no data between %s and %s, insert it to errordb",
                             project_name, start_date, yesterday)
                insert_vacancy(es, ERROR_TYPE, project_name, yesterday, project_group, vacancy_dates)

if __name__ == '__main__':
    main()  # click parses --host/--config from the command line

