#!/usr/bin/env python
# coding=utf-8

"""

@author: tongzhenguo

@time: 2020/4/8 上午10:51

@desc: Load a Spark ORC dataset into sharded Redis instances, with the
       Redis connection/sharding settings resolved from redis.conf.

"""

import sys

import ConfigParser
import redis
from py4j.protocol import Py4JJavaError
from pyspark.sql import SparkSession


class RedisDb:
    """Shards key/value batches across a list of Redis instances.

    Expected kwargs (as produced by ``RedisBuilder.get``):
      host_list, port_list, port, db, passwd, expire_time, valueStoreType,
      shard, and optionally prefix/suffix plus the three sharding callables
      get_sharding_key_fn / shard_fn / is_full_fn.
    """

    def __init__(self, **kwargs):
        self.kwargs = kwargs
        self.redis_list = []
        for i, host in enumerate(kwargs['host_list']):
            # Pair each host with its port; fall back to the shared 'port'
            # when port_list is shorter than host_list (or absent).
            try:
                port = kwargs['port_list'][i]
            except Exception:
                port = kwargs['port']
            # NOTE(review): 'db' comes from the config file as a string —
            # confirm redis-py accepts it, or cast to int upstream.
            r = redis.StrictRedis(host=host, port=port, db=kwargs['db'],
                                  password=kwargs['passwd'], max_connections=1)
            self.redis_list.append(r)
        self.expire_time = kwargs['expire_time']
        self.size = len(self.redis_list)
        self.valueStoreType = kwargs['valueStoreType']
        # Fix: RedisBuilder.get() does not supply these callables, so the
        # original hard kwargs[...] lookups raised KeyError on construction.
        # Default to None; callers must set them before using write_batch.
        self.get_sharding_key_fn = kwargs.get('get_sharding_key_fn')
        self.shard_fn = kwargs.get('shard_fn')
        self.is_full_fn = kwargs.get('is_full_fn')

    def write_batch(self, kv_dict):
        """Route kv_dict entries to their shard(s) and write each sub-batch."""
        mode = self.kwargs['shard']
        prefix = self.kwargs.get('prefix')
        suffix = self.kwargs.get('suffix')

        # key: redis instance, value: the kv pairs routed to that instance
        redis_kv_dict = {r: {} for r in self.redis_list}
        for k, v in kv_dict.items():
            sharding_key = self.get_sharding_key_fn(k, prefix, suffix)
            # shard_fn is expected to return the redis instances for this key.
            ret = self.shard_fn(sharding_key, mode)
            if not ret:
                print('sharding error for key %s' % k)
                continue
            for r in ret:
                redis_kv_dict[r][k] = v
        for r, kv in redis_kv_dict.items():
            self.write_batch_base(r, kv)

    def write_batch_base(self, redis_r, kv_dict):
        """Pipeline one batch into a single Redis instance.

        Each key is deleted, rewritten according to valueStoreType
        (LIST/STRING/HASH) and given the configured TTL.
        Returns True on success (kv_dict is cleared), False on failure.
        """
        if not kv_dict:
            return True
        store_type = self.valueStoreType.upper()  # invariant, hoisted
        try:
            p = redis_r.pipeline(transaction=False)
            for key, value in kv_dict.items():
                p.delete(key)
                if store_type == "LIST":
                    p.rpush(key, *value)
                elif store_type == "STRING":
                    p.set(key, value)
                elif store_type == "HASH":
                    p.hmset(key, value)
                p.expire(key, self.expire_time)
            p.execute()
            kv_dict.clear()
            return True
        except Exception as exception:
            # Best-effort write: report and signal failure instead of raising.
            # (Fix: the original fell through and returned None here.)
            print("%s, %s" % (str(exception), "write batch wrong"))
            return False


# Wraps the redis configuration: maps each data source to its connection settings.
class RedisBuilder:
    """Parses a redis .conf file and builds RedisDb objects per data source."""

    # Options every data-source section must resolve, either from its own
    # section or from the [global] defaults.
    _REQUIRED_OPTIONS = ('host', 'port', 'db', 'passwd', 'expire_time',
                         'shard', 'valueStoreType')
    # Options that may be absent entirely.
    _OPTIONAL_OPTIONS = ('prefix', 'suffix')

    def __init__(self, conf):
        # conf: path to an ini-style file with a [global] section plus one
        # section per data source.
        self.conf = conf
        self.data_source_map_redis_conf = self.parse_redis_conf()

    def parse_redis_conf(self):
        """Return the mapping {data_source: {option: value}}.

        [global] values act as defaults that per-source sections override.
        Raises RuntimeError when a required option is missing everywhere.
        """
        # Local py2/py3-compatible import (the module was renamed in py3).
        try:
            import configparser as _configparser
        except ImportError:
            import ConfigParser as _configparser

        cf = _configparser.ConfigParser()
        cf.read(self.conf)

        # Collect defaults from [global]. Fix: the original never loaded the
        # optional prefix/suffix into global_dict, so global-level values for
        # them were silently ignored in the fallback below.
        global_dict = {}
        for option in self._REQUIRED_OPTIONS + self._OPTIONAL_OPTIONS:
            if cf.has_option("global", option):
                global_dict[option] = cf.get("global", option)
        cf.remove_section("global")

        data_source_map_redis_conf = {}
        for sub_conf in cf.sections():
            section = {}
            for option in self._REQUIRED_OPTIONS:
                if cf.has_option(sub_conf, option):
                    section[option] = cf.get(sub_conf, option)
                elif option in global_dict:
                    section[option] = global_dict[option]
                else:
                    raise RuntimeError('option %s not found ' % option)
            for option in self._OPTIONAL_OPTIONS:
                if cf.has_option(sub_conf, option):
                    section[option] = cf.get(sub_conf, option)
                elif option in global_dict:
                    section[option] = global_dict[option]
            data_source_map_redis_conf[sub_conf] = section

        return data_source_map_redis_conf

    def get(self, data_source):
        """Build a RedisDb instance for the given data source section."""
        redis_conf = self.data_source_map_redis_conf[data_source]
        kwargs = dict(redis_conf)

        # 'port' supports three forms: a range "8000-8005", an enumeration
        # "8000,8001,8002", or a single port.
        port_value = redis_conf['port']
        if '-' in port_value:
            start, end = port_value.split('-')[0], port_value.split('-')[1]
            port_list = list(range(int(start), int(end) + 1))
        elif ',' in port_value:
            # Security/robustness fix: parse the enumeration directly instead
            # of eval()ing configuration text.
            port_list = [int(p) for p in port_value.split(',')]
        else:
            # Consistency fix: the other branches yield ints; cast here too.
            port_list = [int(port_value)]
        kwargs['port_list'] = port_list

        # 'host' supports a comma-separated list, a '{port}' macro expanded
        # once per port, or a single host.
        host_value = redis_conf['host']
        if ',' in host_value:
            host_list = host_value.split(',')
        elif '{port}' in host_value:
            host_list = [host_value.replace('{port}', str(port))
                         for port in port_list]
        else:
            host_list = [host_value]
        kwargs['host_list'] = host_list

        return RedisDb(**kwargs)


def partition_func(records, file_size, data_source):
    """Spark per-partition sink: parse records and batch-write them to Redis.

    records: iterable of (key, value_string) pairs for this partition
             (renamed from ``iter``, which shadowed the builtin; the only
             caller passes it positionally).
    file_size: total record count across all partitions, used only for
               progress reporting.
    data_source: section name in redis.conf selecting the target Redis.

    Raises RuntimeError when the Redis config cannot be resolved, when the
    target Redis reports it is full, or when any write fails.
    """
    rb = RedisBuilder('redis.conf')
    try:
        redis_db = rb.get(data_source)
    except Exception as e:
        raise RuntimeError("get redis info error, %s" % repr(e))
    print("INFO get redis info from redis.conf,data_source:%s ,redis_db:%s " % (data_source, redis_db))

    try:
        # Flush to Redis every N parsed records.
        write_batch_size = 500
        store_type = redis_db.valueStoreType.upper()  # invariant, hoisted

        info_dict = {}
        line_cnt = 0
        for record in records:
            line_cnt += 1
            if len(record) != 2:
                # Malformed row: skip it. (The original duplicated this check
                # with an unreachable raise immediately after the continue.)
                continue
            key = record[0]
            if store_type == "LIST":
                values = record[1].split(",")
            elif store_type == "STRING":
                values = record[1]
            elif store_type == "HASH":
                # value string looks like "field:val,field:val,..."
                values = {info.split(":")[0]: info.split(":")[1]
                          for info in record[1].split(",")}
            else:
                raise Exception("NOT SUPPORT REDIS TYPE FOR %s" % store_type)
            info_dict[key] = values

            if line_cnt % write_batch_size == 0:
                # Guard first, then write — the original called is_full_fn()
                # twice with a redundant negated re-check.
                if redis_db.is_full_fn():
                    raise RuntimeError("ERROR :REDIS MEM UTILS >= 80% , data_source : " + data_source)
                redis_db.write_batch(info_dict)
                info_dict.clear()
                print("writing to redis , cnt : %s , processing %s" % (line_cnt, str(100 * line_cnt / file_size) + "%"))

        # Flush the tail batch (fewer than write_batch_size records).
        redis_db.write_batch(info_dict)
    except Exception as e:
        raise RuntimeError("write to redis error , %s" % repr(e))


if __name__ == "__main__":
    # argv[1]: HDFS path of the ORC dataset to load into Redis.
    hdfs_path = sys.argv[1].strip()
    spark = SparkSession.builder.getOrCreate()
    try:
        retDF = spark.read.orc(hdfs_path)
        file_size = retDF.count()
        retDF.show(n=20, truncate=500)
        # NOTE(review): data_source is passed as "" here, so rb.get("") will
        # only succeed if redis.conf has a section named "" — confirm intent.
        retDF.foreachPartition(lambda x: partition_func(x, file_size, ""))
        print("writing to redis success, cnt : %s , processing %s" % (file_size, "100%"))
    except Py4JJavaError as e:
        print("Py4JJavaError: {0}".format(e))
    finally:
        # Fix: release the Spark session even when the job fails.
        spark.stop()