import json
import math
import os
import time
import uuid
from collections import defaultdict, deque

import redis
from redis.client import Redis

# Module-level Redis client used by the __main__ section below.
# decode_responses=True means every reply comes back as str, not bytes.
# NOTE(review): LAN IP is hard-coded — presumably a dev box; verify.
mconn = Redis(host="192.168.35.113", port=6379, db=0, decode_responses=True)


def acquire_lock_with_timeout(conn, lockname, acquire_timeout=10, lock_timeout=10):
    """Acquire the distributed lock 'lock:<lockname>'.

    Retries for up to `acquire_timeout` seconds; an acquired lock expires
    after `lock_timeout` seconds so a crashed holder cannot block forever.

    Returns the unique lock identifier on success, False on timeout.
    """
    identifier = str(uuid.uuid4())
    lockname = 'lock:' + lockname
    lock_timeout = int(math.ceil(lock_timeout))  # Redis EX wants whole seconds
    end = time.time() + acquire_timeout
    while time.time() < end:
        # SET NX EX makes acquire + expire a single atomic step.  The
        # original setnx()/expire() pair could leave a TTL-less lock if
        # the client died between the two calls.
        if conn.set(lockname, identifier, nx=True, ex=lock_timeout):
            return identifier
        elif conn.ttl(lockname) < 0:
            # Lock exists but has no expiry (TTL == -1): repair it.
            # The original `not conn.ttl(...)` check never fired, because
            # redis-py returns -1 here, which is truthy.
            conn.expire(lockname, lock_timeout)
        time.sleep(0.001)
    return False


def release_lock(conn, lockname, identifier):
    """Release 'lock:<lockname>' if and only if we still hold it.

    Uses WATCH/MULTI so the check-then-delete is atomic; retries on a
    WatchError (someone touched the key between WATCH and EXEC).

    Returns True if the lock was released, False if it was already lost
    (expired or taken by someone else).  The original always returned
    False, so callers could never tell the difference.
    """
    pipe = conn.pipeline(True)
    lockname = 'lock:' + lockname
    while True:
        try:
            pipe.watch(lockname)
            if pipe.get(lockname) == identifier:
                pipe.multi()
                pipe.delete(lockname)
                pipe.execute()
                return True
            # Not our lock any more: stop watching and report failure.
            pipe.unwatch()
            break
        except redis.exceptions.WatchError:
            # Key changed under us between WATCH and EXEC: retry.
            pass
    return False


def create_chat(conn, sender, recipients, message, chat_id=None):
    """Create a chat with the given members and send the first message.

    Allocates a chat id (unless one is supplied), registers every member
    (sender included) in 'chat:<id>' and each member's 'seen:<user>' zset
    with seen-score 0, then delivers `message` via send_message.

    Returns the chat_id (as returned by send_message).
    """
    chat_id = chat_id or str(conn.incr('ids:chat:'))
    # Build a new list instead of appending to `recipients`, which
    # mutated the caller's list on every call.
    members = recipients + [sender]
    membersd = dict((m, 0) for m in members)

    pipe = conn.pipeline(True)
    pipe.zadd('chat:' + chat_id, membersd)
    for member in members:
        pipe.zadd('seen:' + member, {chat_id: 0})
    pipe.execute()
    return send_message(conn, chat_id, sender, message)


def send_message(conn, chat_id, sender, message):
    """Append `message` to chat `chat_id`, scored by its message id.

    Holds the per-chat lock so id allocation and the zadd are atomic with
    respect to other senders.

    Returns chat_id.  Raises RuntimeError (a subclass of Exception, so
    existing `except Exception` callers still work) if the lock cannot
    be acquired.
    """
    identifier = acquire_lock_with_timeout(conn, 'chat:' + chat_id)
    if not identifier:
        # Fixed misspelled message ("Counldn`t") and the bare Exception.
        raise RuntimeError("Couldn't get the lock")
    try:
        mid = conn.incr('ids:' + chat_id)
        ts = time.time()
        # Messages are stored JSON-packed, scored by message id.
        packed = json.dumps({
            'id': mid,
            'ts': ts,
            'sender': sender,
            'message': message
        })
        conn.zadd('msgs:' + chat_id, {packed: mid})
    finally:
        # Always release, even if incr/zadd raised.
        release_lock(conn, 'chat:' + chat_id, identifier)
    return chat_id


def fetch_pending_message(conn, recipient):
    """Return unseen messages for `recipient`, one entry per chat.

    Returns a list of {chat_id: [message_dict, ...]} for every chat that
    had new messages.  As a side effect, advances the recipient's seen
    markers and trims messages that every chat member has now seen.
    """
    # (chat_id, last_seen_message_id) for every chat the recipient is in.
    seen = conn.zrange('seen:' + recipient, 0, -1, withscores=True)
    pipe = conn.pipeline(True)
    target = []
    # Batch-fetch everything newer than our seen marker in each chat.
    for chat_id, seen_id in seen:
        pipe.zrangebyscore('msgs:' + chat_id, seen_id + 1, 'inf')
    chat_info = zip(seen, pipe.execute())
    for i, ((chat_id, seen_id), message) in enumerate(chat_info):
        if not message:
            continue
        # Decode the JSON-packed messages in place.
        message[:] = map(json.loads, message)
        # Our new high-water mark is the id of the newest message fetched.
        seen_id = message[-1]['id']
        conn.zadd('chat:' + chat_id, {recipient: seen_id})
        # Lowest seen-score among members = messages everyone has read.
        min_id = conn.zrange('chat:' + chat_id, 0, 0, withscores=True)
        pipe.zadd('seen:' + recipient, {chat_id: seen_id})
        if min_id:
            # Safe to delete messages every member has already seen.
            pipe.zremrangebyscore('msgs:' + chat_id, 0, min_id[0][1])
        target.append({chat_id: message})
    pipe.execute()
    return target


def join_chat(conn, chat_id, user):
    """Add `user` to the chat, treating all prior messages as seen.

    Registers the user in 'chat:<id>' and 'seen:<user>' at the current
    message-id counter so only future messages show up as pending.
    """
    # conn.get returns None when no message was ever sent to this chat;
    # int(None) raised TypeError before — treat a missing counter as 0.
    message_id = int(conn.get('ids:' + chat_id) or 0)
    pipe = conn.pipeline(True)
    pipe.zadd('chat:' + chat_id, {user: message_id})
    pipe.zadd('seen:' + user, {chat_id: message_id})
    pipe.execute()


def leave_chat(conn, chat_id, user):
    """Remove `user` from the chat, cleaning up when the chat empties.

    If the user was the last member, deletes the chat's message log and
    id counter; otherwise trims messages everyone remaining has seen.
    """
    pipe = conn.pipeline(True)
    pipe.zrem('chat:' + chat_id, user)
    pipe.zrem('seen:' + user, chat_id)
    pipe.zcard('chat:' + chat_id)
    if not pipe.execute()[-1]:
        # Last member left: drop the message log and the id counter.
        # (Fixed leak: key was 'msgs' + chat_id, missing the ':'.)
        pipe.delete('msgs:' + chat_id)
        pipe.delete('ids:' + chat_id)
        pipe.execute()
    else:
        # The member with the lowest seen-score bounds what can be trimmed.
        oldest = conn.zrange('chat:' + chat_id, 0, 0, withscores=True)
        conn.zremrangebyscore('msgs:' + chat_id, 0, oldest[0][1])


# In-memory accumulator: aggregates[day][country] -> hit count.
# Filled and flushed (then cleared) by daily_country_aggregate below.
aggregates = defaultdict(lambda: defaultdict(int))


def daily_country_aggregate(conn, line):
    """Accumulate per-day, per-country hit counts from log lines.

    Call with a log line ("<ip> <day> ..." whitespace-separated) to count
    it in the module-level `aggregates` dict.  Call with a falsy line
    (e.g. None) to flush every accumulated day to the Redis zset
    'daily:country:<day>' and clear the in-memory state.
    """
    if line:
        fields = line.split()
        ip = fields[0]
        day = fields[1]
        # find_city_by_ip_local returns [ip, country, city]; the country
        # is at index 1 — index 2 (the city) was being counted before.
        country = find_city_by_ip_local(ip)[1]
        aggregates[day][country] += 1
        return
    # Flush phase: snapshot the keys since we pop while iterating.
    # (Removed a leftover debug print of each day's aggregate.)
    for day in list(aggregates.keys()):
        conn.zadd('daily:country:' + day, dict(aggregates.pop(day)))


def copy_logs_to_redis(conn, path, channel, count=10, limit=2 ** 30, quit_when_done=True):
    """Stream every logfile under `path` into Redis keys '<channel><name>'.

    Keeps at most `limit` bytes of log data in Redis at once, blocking
    until processors finish (see _clean) before copying more.  `count` is
    the number of processors that must mark a file done before its bytes
    can be reclaimed.  NOTE(review): `quit_when_done` is unused here —
    kept for interface compatibility.
    """
    bytes_in_redis = 0
    waiting = deque()
    # Done-counters are compared as strings (decode_responses=True).
    count = str(count)
    for logfile in sorted(os.listdir(path)):
        print(logfile)
        full_path = os.path.join(path, logfile)
        fsize = os.stat(full_path).st_size
        # Respect the Redis memory budget: wait for consumed files first.
        while bytes_in_redis + fsize > limit:
            cleaned = _clean(conn, channel, waiting, count)
            if cleaned:
                bytes_in_redis -= cleaned
            else:
                time.sleep(.25)
        # Copy the file in 128 KiB chunks, appending to the Redis value.
        with open(full_path, 'rb') as inp:
            block = inp.read(2 ** 17)
            while block:
                conn.append(channel + logfile, block)
                block = inp.read(2 ** 17)
        bytes_in_redis += fsize
        waiting.append((logfile, fsize))

    # Everything copied: wait for the processors to drain the queue.
    while waiting:
        # Fixed NameError: this loop assigned `cleand` but then tested
        # and subtracted `cleaned`, crashing on the first pass.
        cleaned = _clean(conn, channel, waiting, count)
        if cleaned:
            bytes_in_redis -= cleaned
        else:
            time.sleep(.25)


def _clean(conn, channel, waiting, count):
    if not waiting:
        return 0
    w0 = waiting[0][0]
    if conn.get(channel + w0 + ':done') == count:
        conn.delete(channel + w0, channel + w0 + ':done')
        return waiting.popleft()[1]
    return 0


def process_logs_from_redis(conn, id, callback,
                            logfile='goreplay.log.202208020000'):
    """Feed each line of a Redis-hosted logfile to `callback`, then mark done.

    `callback(conn, line)` is invoked once per line, and finally with
    None as a flush sentinel.  Afterwards the '<file>:done' counter is
    incremented so copy_logs_to_redis/_clean can reclaim the file.

    Generalized: the previously hard-coded filename is now a parameter
    with the old value as default, so existing callers are unaffected.
    NOTE(review): `id` is accepted for interface compatibility but unused.
    """
    channel = 'file:'
    # Pick the gzip-aware reader for compressed files.
    block_reader = readblocks_gz if logfile.endswith('.gz') else readblocks
    for line in readlines(conn, channel + logfile, block_reader):
        callback(conn, line)
    callback(conn, None)  # flush sentinel for aggregating callbacks
    conn.incr(channel + logfile + ':done')


def readlines(conn, key, rbblocks):
    """Yield newline-terminated lines from the block stream `rbblocks`.

    Buffers partial lines across block boundaries; when the reader emits
    an empty block (end of stream), yields whatever trailing text is
    left, then stops.
    """
    buffered = ''
    for chunk in rbblocks(conn, key):
        buffered += chunk
        cut = buffered.rfind('\n')
        if cut >= 0:
            # Emit every complete line; keep the tail for the next chunk.
            complete, buffered = buffered[:cut], buffered[cut + 1:]
            for line in complete.split('\n'):
                yield line + '\n'
        if not chunk:
            # End-of-stream sentinel: flush the remainder.
            yield buffered
            break


def readblocks(conn, key, blocksize=2 ** 17):
    """Yield successive `blocksize` chunks of the string stored at `key`.

    Ends with an empty chunk as an end-of-stream sentinel (consumed by
    readlines).  A short chunk from SUBSTR means we read past the end of
    the value, which terminates the loop.
    """
    lb = blocksize
    pos = 0
    while lb == blocksize:
        block = conn.substr(key, pos, pos + blocksize - 1)
        yield block
        lb = len(block)
        # Fixed: was `pos += block`, adding a string to an int — that
        # raised TypeError as soon as a second chunk was requested.
        pos += blocksize
    yield ''


def readblocks_gz(conn, key, blocksize=2 ** 17):
    """Gzip-aware variant of readblocks — NOT IMPLEMENTED.

    The original stub spun forever in `while lb == blocksize: pass`
    (a busy-wait infinite loop); fail fast instead so callers notice.
    TODO: decompress the readblocks(...) stream with zlib (wbits=47)
    and yield plain-text chunks, mirroring readblocks's interface.
    """
    raise NotImplementedError('readblocks_gz is not implemented yet')


def find_city_by_ip_local(ip):
    """Stub geo-IP lookup: echo `ip` with fixed country/city placeholders."""
    country, city = "china", "beijing"
    return [ip, country, city]


def print_hi(name):
    """Print a greeting for `name` (PyCharm new-project template leftover)."""
    # Set a breakpoint on the next line to debug (Ctrl+F8 toggles it).
    greeting = f'Hi, {name}'
    print(greeting)


# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    # Count two hits for the same day, then flush (None) the aggregate
    # into the 'daily:country:<day>' zset on the module-level connection.
    daily_country_aggregate(mconn, "192.168.5.108 2011-10-10 13:55:36 achievement-765")
    daily_country_aggregate(mconn, "192.168.5.108 2011-10-10 13:15:36 achievement-765")
    daily_country_aggregate(mconn, None)
    # Copy local goreplay logfiles into Redis under the 'file:' channel
    # (Windows path — this script is run from a Windows dev machine).
    copy_logs_to_redis(mconn, 'D:\\u\\logs\\GOREPLAY', "file:")
    # NOTE(review): readblocks is a generator; this builds it but never
    # iterates it, so the call does nothing — presumably debug leftover.
    readblocks(mconn, "")
    print_hi('PyCharm')

# Visit https://www.jetbrains.com/help/pycharm/ for PyCharm help.
