# -*- coding: utf-8 -*-

"""
kg json to elasticsearch job
"""

from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import

from kgpipeline.job import KgJob, JobInputMisMatchError, JobConfigError
from kgpipeline.jsonutil import entity_json_decode

try:
    from elasticsearch import Elasticsearch
    from elasticsearch.helpers import bulk
except ImportError:
    from elasticsearch2 import Elasticsearch
    from elasticsearch2.helpers import bulk

import logging

logger = logging.getLogger(__name__)


def doc_from_kg_entity(item, index, operation):
    """Decorate a decoded KG entity dict with the bulk-API metadata
    fields (``_index``, ``_type``, ``_id``, ``_op_type``) expected by
    ``elasticsearch.helpers.bulk``.

    Mutates ``item`` in place and returns the same dict. The document
    id is taken from the entity's JSON-LD ``@id`` field; the mapping
    type is hardcoded to ``doc``.
    """
    item.update({
        "_index": index,
        "_type": "doc",
        "_id": item["@id"],
        "_op_type": operation,
    })
    return item


def get_uploader(hosts, username, password):
    """Build a partition-upload function for ``rdd.mapPartitionsWithIndex``.

    The returned generator function opens its own Elasticsearch client
    (clients are created inside the partition, on the executor) and
    bulk-indexes the partition's items, yielding the bulk result tuple.
    ``username``/``password`` are applied as HTTP basic auth only when a
    username is given.
    """
    def upload_partition(_, items):
        if username:
            client = Elasticsearch(hosts=hosts, http_auth=(username, password))
        else:
            client = Elasticsearch(hosts=hosts)

        result = bulk(
            client,
            items,
            stats_only=False,
            raise_on_error=False,
            chunk_size=100,
            max_chunk_bytes=20 * 1024 * 1024,
            request_timeout=120,
        )
        yield result

    return upload_partition


class KgEsJob(KgJob):
    """Job that publishes KG entity JSON to Elasticsearch and rotates
    dated indices behind an alias."""

    # Type key under which this job is registered (see the
    # KgJob.register call at the bottom of this module).
    type_key = "es"

    def process(self, inputs):
        """
        publish entities to elasticsearch.

        only one input support.

        config includes:
        - date: current date (``YYYY-MM-DD``; dashes are stripped to
          build the dated index suffix)
        - partition: output partitions. optional, default 10
        - alias: index alias
        - index: index name (the actual index created is
          ``<index>_<YYYYMMDD>``)
        - hosts: es hosts
        - settings: es settings. optional, default empty
        - mappings: es mappings (schema). optional, default empty
        - username: es user. optional, default empty
        - password: es password. optional, default empty
        - operation: es operation. optional, default 'index'
        - remain: number of dated indices to keep. optional, default 3

        Raises
        ------
        JobInputMisMatchError
            if ``inputs`` does not contain exactly one (df, meta) pair.
        JobConfigError
            if ``alias``, ``index`` or ``hosts`` is missing.

        Notes
        -----
        the type in index is hardcoded ``doc``
        """
        if len(inputs) != 1:
            raise JobInputMisMatchError("needs exactly one input json file for an Elasticsearch Job")

        # "YYYY-MM-DD" -> "YYYYMMDD": the date suffix of today's index.
        today = self.config["date"].replace("-", "")

        partition = self.config.get("partition", 10)

        alias = self.config.get("alias")
        if not alias:
            raise JobConfigError("Input parameter `alias` is missing!")

        index = self.config.get("index")
        if not index:
            raise JobConfigError("Input parameter `index` is missing!")

        # Dated index actually written to; the alias is moved onto it
        # at the end of the job.
        index_date = index + "_" + today

        body = {
            "settings": self.config.get("settings", {}),
            "mappings": self.config.get("mappings", {})
        }

        print("es index is", index_date)
        print("es alias is", alias)

        hosts = self.config.get("hosts")
        if not hosts:
            raise JobConfigError("Input parameter `hosts` is missing!")

        username = self.config.get("username")
        password = self.config.get("password")

        operation = self.config.get("operation", "index")

        # Driver-side client, used for index/alias administration only;
        # per-partition clients do the document uploads.
        if username:
            es = Elasticsearch(hosts=hosts, http_auth=(username, password))
        else:
            es = Elasticsearch(hosts=hosts)

        # For non-upsert operations today's index is rebuilt from
        # scratch; upsert keeps any existing index so prior docs remain.
        if operation != "upsert":
            if es.indices.exists(index=index_date):
                es.indices.delete(index=index_date)

        if not es.indices.exists(index=index_date):
            es.indices.create(index=index_date, body=body)

        uploader = get_uploader(hosts, username, password)

        df, _ = inputs[0]

        # Decode each JSON row into an entity dict, attach bulk metadata,
        # then spread the upload across `partition` executor tasks.
        docs = df.rdd.map(lambda x: entity_json_decode(x["value"])).map(
            lambda x: doc_from_kg_entity(x, index_date, operation)
        ).repartition(partition)

        # collect() pulls back one bulk stats tuple per partition.
        upload_stat = docs.mapPartitionsWithIndex(
            uploader
        ).collect()

        from pprint import pprint
        for p_stat in upload_stat:
            pprint(p_stat)

        # Scan all indices: detach our alias from old dated indices and
        # remember their dates so old generations can be pruned below.
        index_alias = es.indices.get_alias()

        indices = []
        for index_name, alias_config in index_alias.items():
            # name part in name_YYYYMMDD
            if index_name[:-9] == index:
                if alias in alias_config["aliases"]:
                    es.indices.delete_alias(index=index_name, name=alias)
                # date part in name_YYYYMMDD
                indices.append(index_name[-8:])

        # Rebuild full index names, oldest first (dates sort lexically).
        sorted_indices = [index + "_" + d for d in sorted(indices)]

        logger.info("all indices: {}".format(",".join(sorted_indices)))

        # Keep at least one generation even if `remain` is misconfigured.
        remain = self.config.get("remain", 3)
        if remain <= 0:
            remain = 1

        # Everything except the newest `remain` generations is deleted.
        to_delete = sorted_indices[:-remain]

        if len(to_delete) > 0:
            logger.info("delete following indices to remain {}: {}".format(remain, ",".join(to_delete)))
            print()
            es.indices.delete(index=",".join(to_delete))

        # Point the alias at the newest surviving generation.
        if not es.indices.exists_alias(index=sorted_indices[-1], name=alias):
            print("add alias {0} to {1}".format(alias, sorted_indices[-1]))
            es.indices.put_alias(index=sorted_indices[-1], name=alias)


# Register this job class under the "es" key (matches KgEsJob.type_key).
KgJob.register("es", KgEsJob)
