# -*- coding: utf-8 -*-

"""
publish data to redis
"""

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division

import time

try:
    from rediscluster import StrictRedisCluster as RedisCluster
except ImportError:
    from rediscluster import RedisCluster

from kgpipeline.job import KgJob, JobInputMisMatchError, JobConfigError
from kgpipeline.jsonutil import entity_json_decode, entity_json_encode

import logging

logger = logging.getLogger(__name__)


# Example cluster configuration (real values come from the job config at runtime):
# redis_cluster_hosts = [
#     {"host": "<redis-host-1>", "port": 6379},
#     {"host": "<redis-host-2>", "port": 6379},
#     {"host": "<redis-host-3>", "port": 6379}
# ]
#
# # ST cluster password (never commit real credentials)
# redis_cluster_password = '<redacted>'
def get_redis_uploader(hosts, password, prefix, batch_size):
    """Build a Spark ``mapPartitionsWithIndex`` function that publishes
    (key, values) pairs to a Redis cluster as sets.

    Each partition opens its own cluster connection and pipelines
    ``SADD`` commands, flushing every ``batch_size`` items.

    :param hosts: startup nodes, e.g. ``[{"host": "...", "port": 6379}]``
    :param password: cluster password (may be ``None``)
    :param prefix: key namespace; items are stored under ``<prefix>:<key>``
    :param batch_size: number of buffered SADD commands per pipeline flush
    :return: a function ``(index, items) -> iterator`` that yields a status
        string on success, or the raised exception object on failure
    """
    def __uploader__(index, items):
        try:
            client = RedisCluster(startup_nodes=hosts, password=password,
                                  skip_full_coverage_check=True)
            pl = client.pipeline()
            total_count = 0
            batch_count = 0
            for item_key, item_values in items:
                if not item_values:
                    # SADD with zero members is a Redis error; skip empties.
                    continue
                batch_count += 1
                pl.sadd("{0}:{1}".format(prefix, item_key), *item_values)
                # was `>`, which flushed batches of batch_size + 1 items
                if batch_count >= batch_size:
                    pl.execute()
                    total_count += batch_count
                    batch_count = 0
            if batch_count > 0:
                # Flush the final partial batch.
                pl.execute()
                total_count += batch_count
            yield "partition {0} added total {1} keys".format(index, total_count)
        except Exception as e:
            # Yield (not raise) so the driver can log per-partition failures
            # after collect() instead of aborting the whole Spark job.
            yield e

    return __uploader__


def remove_keys(client, prefix):
    """Delete every Redis key matching ``<prefix>:*``.

    Keys are discovered with a non-blocking SCAN and removed in batched
    DELETE calls so that wiping a large namespace does not stall Redis.

    :param client: connected Redis (cluster) client
    :param prefix: key namespace to wipe
    """
    start_time = time.time()
    item_count = 0
    batch_size = 100000
    keys = []

    logger.info("Start scanning keys...")

    for k in client.scan_iter("{0}:*".format(prefix), count=batch_size):
        keys.append(k)
        if len(keys) >= batch_size:
            item_count += len(keys)
            logger.info("batch delete to %s ...", item_count)

            client.delete(*keys)
            keys = []

    # Flush the final partial batch.
    if keys:
        item_count += len(keys)
        logger.info("batch delete to %s", item_count)

        client.delete(*keys)

    end_time = time.time()

    # time.time() returns seconds; multiply (not divide) by 1000 for ms.
    # The original divided, under-reporting the duration by a factor of 1e6.
    logger.info("deleted %s keys in %0.3f ms.", item_count, (end_time - start_time) * 1000.0)


# sample meta_info:
# 'name', 'pubfund-hsjy'
# 'name_zh', '公募恒生聚源'
# 'esusername', ''
# 'espassword', ''
# 'esurl', 'http://55.12.210.55:9200/ry_pub_fund_hsjy/doc'
# 'neo4jurl', ''
# 'neo4jusername', 'neo4j'
# 'neo4jpassword', 'password'
def update_meta(client, app_id, prefix, meta_info):
    """Record application metadata in Redis.

    Every entry of ``meta_info`` — preceded by the ``prefix`` field — is
    written into the hash ``meta-kg-<app_id>``; finally the global
    ``prefix-index`` hash is pointed from ``prefix`` to that meta hash so
    an application can be resolved from its key prefix.

    :param client: connected Redis (cluster) client
    :param app_id: application identifier used to name the meta hash
    :param prefix: key namespace of the published data
    :param meta_info: mapping of additional meta fields to store
    """
    meta_key = "meta-kg-{0}".format(app_id)

    # 'prefix' goes first; caller-supplied fields follow (and may override it).
    pairs = [('prefix', prefix)]
    pairs.extend(meta_info.items())
    for field, value in pairs:
        client.hset(meta_key, field, value)

    client.hset("prefix-index", prefix, meta_key)


class KgRedisJob(KgJob):
    """Job that publishes (key, values) pairs to a Redis cluster and
    registers the dataset's metadata for lookup by prefix.

    Required config keys: ``prefix``, ``redis_hosts``, ``app_id``, ``meta``.
    Optional: ``redis_password``, ``batch_size`` (defaults to 10000).
    """
    type_key = "redis"

    def _required(self, key):
        # Fetch a mandatory config entry or fail fast with a uniform error.
        value = self.config.get(key)
        if not value:
            raise JobConfigError("Input parameter `{0}` is missing!".format(key))
        return value

    def process(self, inputs):
        """Publish the single input DataFrame to Redis.

        :param inputs: list of (DataFrame, meta) pairs; exactly one expected.
            Each row's "value" column decodes to a (key, entities) pair.
        :raises JobInputMisMatchError: unless exactly one input is given
        :raises JobConfigError: when a required config entry is absent
        """
        if len(inputs) != 1:
            raise JobInputMisMatchError("needs exactly one input json file for a Redis Job")

        batch_size = self.config.get("batch_size", 10000)
        prefix = self._required("prefix")
        hosts = self._required("redis_hosts")
        # password can be absent
        password = self.config.get("redis_password")
        app_id = self._required("app_id")
        meta_info = self._required("meta")

        client = RedisCluster(startup_nodes=hosts, password=password,
                              skip_full_coverage_check=True)

        # Wipe any stale keys from a previous publish under this prefix.
        remove_keys(client, prefix)

        df, _ = inputs[0]
        names = df.rdd.map(lambda x: entity_json_decode(x["value"])).map(
            lambda x: (x[0], [entity_json_encode(e) for e in x[1]]))

        uploader = get_redis_uploader(hosts, password, prefix, batch_size)
        results = names.mapPartitionsWithIndex(uploader).collect()

        for i, res in enumerate(results):
            logger.info("partition {0}: {1}".format(i, res))

        update_meta(client, app_id, prefix, meta_info)


# Make this job type discoverable by the pipeline's job factory; use the
# class's own type_key so the registry name can never drift from it.
KgJob.register(KgRedisJob.type_key, KgRedisJob)