import json
import time
from crawler.exceptions import SendReplyStatusException
from crawler.logger import other_logger
from config import config
from crawler.connection.redis_connection import ConnectRedis
from crawler.connection.mysql_connection import ConnectMysql

# Module-level connections, created once at import time and shared by this module.
# Primary Redis connection, built from application config.
redis_conn = ConnectRedis(ip_ports=config.REDIS_HOST + ':' + str(config.REDIS_PORT), db=config.REDIS_DB,
                          user_pass=config.REDIS_PASSWORD)
# NOTE(review): hard-coded host, port, db index and password for a second Redis
# instance — a credentials leak and a deploy hazard; these should move into
# config like the connection above. TODO confirm this instance is still in use.
redis_shedana = ConnectRedis(ip_ports='124.222.45.213' + ':' + '6379', db=7, user_pass='8mkfmskwbwjez3fc')

# MySQL connection for the data database, also built from application config.
mysql_db = ConnectMysql(
    ip=config.DATA_DB_HOST,
    port=config.DATA_DB_PORT,
    user_name=config.DATA_DB_USER,
    user_pass=config.DATA_DB_PASSWORD,
    db=config.DATA_DB_NAME)


def filter_push_redis(redis_server, slot_key, task, priority_enabled=True):
    """
    Deduplicated Redis push: a Lua script atomically SADDs the task's jobId
    into a filter set and, only when the id is new, pushes the serialized task
    onto the queue list.

    slot_key: redis key fragment, wrapped in {} so both keys hash to the same
        cluster slot (avoids cross-slot errors).
    task: task dict; must contain 'jobId', may contain 'priorityFlag'
        (0 = low priority, 1 = high priority; default 0).
    priority_enabled: when True (default), tasks with priorityFlag == 1 are
        LPUSHed to the head of the queue; otherwise everything is RPUSHed.

    Returns (distribute_time_ms, result) where result is 1 if the task was
    enqueued and 0 if it was filtered out as a duplicate.
    """
    # High-priority tasks jump the queue via LPUSH; everything else — or all
    # tasks when priority handling is disabled — goes to the tail via RPUSH.
    use_head = priority_enabled and task.get('priorityFlag', 0) == 1
    push_cmd = 'LPUSH' if use_head else 'RPUSH'

    # project_type distinguishes channels so tasks can be dispatched to
    # different crawler clusters.
    project_type = 'victor'

    lua_script = '''
        local v = redis.call("SADD", KEYS[2], ARGV[1])
        if v == 1
        then
    	    return redis.call("{}", KEYS[1], ARGV[2]) and 1
        else
    	    return 0
        end
        '''.format(push_cmd)

    queue_key = f"{{{slot_key}}}_{project_type}_queue"
    filter_key = f"{{{slot_key}}}_{project_type}_filter"

    result = redis_server.eval(lua_script, 2, queue_key, filter_key, task['jobId'], json.dumps(task))
    return int(time.time() * 1000), result


def filter_pop_redis(redis_server, slot_key, project_type='victor'):
    """
    Deduplicated Redis pop: a Lua script atomically LPOPs the next task off
    the queue list and removes its jobId from the companion filter set, so
    the same id can be enqueued again later.

    slot_key: redis key fragment, wrapped in {} so both keys hash to the same
        cluster slot (avoids cross-slot errors).
    project_type: channel identifier; default 'victor' is the dm channel.

    Returns the raw popped value (the JSON task body), or nil/None when the
    queue is empty.
    """
    # Fixes vs previous version:
    # 1. LPOP on an empty list yields false (nil) in Lua; the old `v ~= ""`
    #    test let that through to string.find, which raised a script error.
    #    `if v then` skips the SREM branch on an empty queue.
    # 2. If the jobId pattern does not match, string.find returns nil and the
    #    old code called SREM with a nil member (script error) — guard on key.
    # NOTE(review): the pattern assumes the body was produced by json.dumps
    # with its default '": ' separator, and that jobId is alphanumeric and not
    # the last key in the object — TODO confirm for all producers.
    script_pop = '''
    local v = redis.call("LPOP", KEYS[1])
    if v
    then
        local firstidx, lastidx, key = string.find(v, "jobId\\": (%w*),")
        if key
        then
            redis.call("SREM", KEYS[2], key)
        end
    end
    return v
    '''
    list_key = "{%(slot_key)s}_%(project_type)s_queue" % {'project_type': project_type, 'slot_key': slot_key}
    set_key = "{%(slot_key)s}_%(project_type)s_filter" % {'project_type': project_type, 'slot_key': slot_key}
    return redis_server.eval(script_pop, 2, list_key, set_key)


def left_pop_redis(redis_conn, redis_key):
    """Pop the head element of the list at *redis_key* (LPOP) and return it."""
    return redis_conn.lpop(redis_key)


def right_push_redis(redis_conn, redis_key, body):
    """Append *body* to the tail of the list at *redis_key* (RPUSH) and return the RPUSH result."""
    return redis_conn.rpush(redis_key, body)


def srem_redis(redis_conn, redis_key, value):
    """Remove *value* from the set at *redis_key* (SREM) and return the SREM result."""
    return redis_conn.srem(redis_key, value)


def retry_task(task, redis_conn, redis_key):
    """
    Internal retry for crawler status 4: re-enqueue the task with an
    incremented retry counter, then always raise SendReplyStatusException so
    the caller skips the normal reply-status flow.

    task: task dict; reads 'currentRetry' (default 0) and 'retryCounts'
        (default 5), and writes the incremented 'currentRetry' back.
    redis_conn: redis connection handed through to filter_push_redis.
    redis_key: slot key handed through to filter_push_redis.

    Raises:
        SendReplyStatusException: unconditionally, whether or not the task
        was re-enqueued.
    """
    current_retry = task.get('currentRetry', 0)
    retry_counts = task.get('retryCounts', 5)
    task['currentRetry'] = current_retry + 1
    # NOTE(review): `<=` permits retryCounts + 1 re-enqueues (currentRetry
    # 0..retryCounts). Kept as-is in case callers rely on it — TODO confirm
    # the intended attempt count.
    if current_retry <= retry_counts:
        try:
            filter_push_redis(redis_conn, redis_key, task)
            other_logger.info('Retry Task Success : {}'.format(json.dumps(task)))

        except Exception as e:
            # Fix: failed re-enqueues were previously logged at INFO level,
            # hiding them from error monitoring.
            other_logger.error('Retry Task Failed : {} | {}'.format(e, json.dumps(task)))

    raise SendReplyStatusException
