import random 
import json
import time

from celery import shared_task
from django.utils import timezone
from django.core.cache import cache

from trend.apis import get_ffk3
from trend.models import Result  
from trend.functions import get_ffk3_piece_trend_data_range

from utils.cache import rate_limit_lock


# @rate_limit_lock("process_data_{gameUniqueId}_{uniqueIssueNumber}")
def process_data(gameUniqueId, uniqueIssueNumber, openCode, **kwargs):
    """Persist one draw result, deduplicated through a 24h cache key.

    Returns True when the result was (attempted to be) stored, False when
    the same (game, issue, code) triple was already seen within 24 hours.
    Extra feed fields arrive via **kwargs and are ignored.
    """
    dedup_key = f'TREND:{gameUniqueId}:{uniqueIssueNumber}:{openCode}'
    if cache.get(dedup_key):
        # Already processed recently — skip the DB round-trip.
        return False

    print(f'Creating {gameUniqueId}.{uniqueIssueNumber} = {openCode}')

    # The first 8 digits of the issue number encode the draw date (YYYYMMDD).
    draw_date = timezone.datetime.strptime(str(uniqueIssueNumber)[:8], '%Y%m%d').date()
    Result.objects.get_or_create(
        game_id=gameUniqueId,
        issue_id=uniqueIssueNumber,
        date=draw_date,
        defaults={'open_code': openCode},
    )

    cache.set(dedup_key, True, timeout=86400)
    return True


@shared_task
def update_ffk3():
    """Poll the FFK3 feed until new results arrive, then rebuild trends.

    Tries up to 20 polls (3s apart, 60s wall-clock budget).  Poll slots
    already marked failed in the cache (TREND:FFK3:RETRYNUM:<n>) are
    skipped.  As soon as at least one new result is stored, the trend
    rebuild task is queued and polling stops.
    """
    started_at = time.time()

    for retry_count in range(20):
        if cache.get(f'TREND:FFK3:RETRYNUM:{retry_count}') is None:
            print(f'Trying retry count: {retry_count}, loading!')
            data = get_ffk3(40)
            created = 0
            for row in data:
                # process_data returns True only for rows not seen before.
                created += process_data(**row)

            if created:
                # New results landed: rebuild the derived trend tables.
                update_ffk3_trend_data.delay()
                break
            else:
                # NOTE(review): this writes key `retry_count - 1`, which on
                # the first iteration is RETRYNUM:-1 and is never read by
                # the check above — looks like an off-by-one; confirm intent.
                print(f'Setting retry num up to {retry_count - 1}')
                cache.set(f'TREND:FFK3:RETRYNUM:{retry_count - 1}', 1, 3600)
        else:
            print(f'Trying retry count: {retry_count}, continued!')

        # Hard 60-second wall-clock budget for the whole task.
        if time.time() - started_at > 60: break
        time.sleep(3)


@shared_task
def update_ffk3_trend_data():
    """Rebuild today's FFK3 hit/miss trend lists and cache them as JSON.

    For each of 1000 (page, group) slots and each trend type — 'ds'
    (reads Result.singular, presumably odd/even) and 'dx' (reads
    Result.large, presumably big/small) — walk today's results in issue
    order, compare each value against a seeded pseudo-random 0/1
    "prediction", and record runs of consecutive hits/misses.  Each
    slot's list is cached under TREND:FFK3:PRED:<page>:<group>:<type>:LIST,
    then the feature-pattern scan tasks are queued.
    """
    # Today's results; Result.ffk3 looks like a custom manager whose
    # .today() filters to the current date — TODO confirm.
    data = Result.ffk3.today()
    # ISO (year, week, weekday) of today; attribute access on the result
    # requires Python 3.9+ (named-tuple form of isocalendar()).
    today = timezone.now().date().isocalendar()
    objects = list(data.order_by('issue_id'))

    types = ['ds', 'dx']
    # choices = [13, 14, 15]

    limit = 1000
    # res = []

    for group in range(limit):
        # Deterministic RNG seed built from today's date plus the slot, so
        # every run of this task regenerates identical "predictions".
        s = today.year
        s = s * 100 + today.week
        s = s * 100 + today.weekday
        s = s * 100 + group % 100
        s = s * 10 + group // 100

        for _type in types:
            # NOTE: reseeded with the same `s` for both types, so 'ds' and
            # 'dx' consume an identical pseudo-random sequence.
            random.seed(s)
            nxt = None   # prediction made for the upcoming draw
            conn = 0     # length of the current hit/miss run
            last = None  # whether the previous draw matched its prediction
            # row = {'now': False}
            row = {}
            res = []

            for obj in objects:
                if _type == 'ds': value = obj.singular
                else: value = obj.large
                # A change in hit/miss state ends the current run: flush a
                # snapshot of the previous row and restart the counter.
                if last != (nxt == value):
                    # if conn in choices:
                    if row:
                        res.append({**row})
                    last = nxt == value
                    conn = 0
                conn += 1
                # `row` is reused and mutated in place; only the snapshots
                # appended above (and the final one below) are kept.
                row['value'] = value
                row['predict'] = nxt
                row['conn'] = conn
                row['last'] = last
                # row['group']= group % 100 + 1
                # row['page'] = group // 100 + 1
                row['num'] = int(obj.issue_id) % 10000  # short issue number
                # row['type'] = _type
                # Prediction for the NEXT draw (0 or 1), deterministic per seed.
                nxt = row['next_predict'] = random.randint(0, 1)

            # Flush the trailing run, flagged as the current one.
            # NOTE(review): with no results today this still appends a bare
            # {'now': True} row — confirm downstream tolerates that.
            row['now'] = True
            res.append({**row})

            _=cache.set(f'TREND:FFK3:PRED:{group // 100 + 1}:{group % 100 + 1}:{_type}:LIST', json.dumps(res))

    # Fan out the feature-pattern scans over the freshly cached lists.
    update_ffk3_trend_feat_data.delay('g13,g10', 10)
    update_ffk3_trend_feat_data.delay('g7,g4,g4')
    update_ffk3_trend_feat_data.delay('g7,g4,g3')
    update_ffk3_trend_feat_data.delay('cl2,l-21')



def process_continue(data, tag, num):
    """Collapse each streak of qualifying rows into its last row.

    A row qualifies when its 'conn' satisfies the constraint: >= num for
    tag 'g', <= num for tag 'l' (any other tag accepts every row).  For
    every maximal streak of consecutive qualifying rows, the streak's
    final row is kept and its 'conn' is overwritten in place with the
    negated streak length.  Returns the list of surviving rows.
    """
    survivors = []
    streak = 0
    pending = None

    for row in data:
        breaks_run = (tag == 'g' and row['conn'] < num) or \
                     (tag == 'l' and row['conn'] > num)
        if not breaks_run:
            # Row qualifies: extend the streak (counted negatively) and
            # remember it as the streak's current last row.
            streak -= 1
            pending = row
            continue
        if pending:
            # Streak just ended: emit its last row, tagged with -length.
            survivors.append(pending)
            pending['conn'] = streak
            streak = 0
            pending = None

    # Flush a streak still open at the end of the input.
    if pending:
        survivors.append(pending)
        pending['conn'] = streak
    return survivors


@shared_task
def update_ffk3_trend_feat_data(raw_feat, gte=1, lte=100):
    """Scan cached trend runs for a feature pattern and cache the matches.

    raw_feat is a comma-separated pattern, e.g. 'g7,g4,g3' or 'cl2,l-21':
      * 'gN' — window position must be a run with 'conn' >= N
      * 'lN' — window position must be a run with 'conn' <= N
      * 'c…' — pre-filter: collapse runs with process_continue() before
               matching (first such entry wins; it is not a window slot)
    gte/lte bound which run lengths are considered at all.  Matches for
    every (page, group, type) slice are JSON-cached under
    TREND:FFK3:PRED:FEAT:<raw_feat>:<gte>:<lte>:LIST.
    """
    feat = raw_feat.split(',')
    func = lambda x: x
    for cf in feat:
        if cf.startswith('c'):
            # Bind tag/num eagerly: a late-binding closure over the loop
            # variable (flake8 B023) only works here because of the break.
            func = lambda x, _tag=cf[1], _num=int(cf[2:]): process_continue(x, _tag, _num)
            break
    # The 'c…' entry is a preprocessor, not a positional feature.
    feat = [f for f in feat if not f.startswith('c')]

    res = []
    for group in range(1000):
        for _type in ['ds', 'dx']:
            raw_data = get_ffk3_piece_trend_data_range(group, _type, 1, 100, False)

            # Keep only runs inside the requested length window, then apply
            # the optional collapse pre-filter and index the survivors.
            data = [row for row in raw_data if gte <= row['conn'] <= lte]
            data = func(data)
            data = [
                {**row, 'index': i}
                for i, row in enumerate(data)
            ]

            N = len(feat)
            M = len(data)
            # NOTE(review): if `feat` ends up empty, the for/else below reads
            # `row` before assignment (NameError) — confirm callers always
            # pass at least one non-'c' feature.
            for start in range(M - N + 1):
                # Slide an N-wide window; every slot must satisfy its feature.
                for offset, fn in enumerate(feat):
                    row = data[start + offset]
                    tag = fn[0]
                    num = int(fn[1:])
                    if tag == 'g' and row['conn'] >= num:
                        pass
                    elif tag == 'l' and row['conn'] <= num:
                        pass
                    else:
                        break
                else:
                    # Full pattern matched; `row` is the window's last run.
                    # Attach the following run's length as the "outcome".
                    if row['index'] + 1 < len(data):
                        row['nxt'] = data[row['index'] + 1]['conn']
                        if data[row['index'] + 1].get('now'):
                            row['nxt_now'] = True
                    res.append({
                        'group': group % 100 + 1,
                        'page': group // 100 + 1,
                        'type': _type,
                        **row,
                    })
    cache.set(f'TREND:FFK3:PRED:FEAT:{raw_feat}:{gte}:{lte}:LIST', json.dumps(res))
            