import json
import logging

import requests
import os
import time
import pandas as pd
from tqdm import *

from app.commons.redis_session import redis_con
from app.service.merge_data.get_token import get_login_token


def get_uniquecode_blacklist():
    """Fetch the full unique-code blacklist from the Redis sorted set."""
    conn = redis_con()
    blacklist = conn.zrange("blacklist_total", start=0, end=-1, withscores=False)
    logging.warning("get blacklist list=%s" % (blacklist))
    return blacklist

def isSourceFile(filename):
    """Return True when *filename* looks like a data source workbook.

    A source file must be an Excel workbook (.xlsx/.xls) whose base name
    is not one of the names this pipeline generates itself. Returns
    False otherwise, or when the name cannot be parsed at all.

    :param filename: e.g. "file_20211111235959.xlsx"
    """
    try:
        # Only Excel workbooks count as source files.
        if get_postfix_from_file_with_dot(filename) not in [".xlsx", ".xls"]:
            return False  # was an implicit None; False is explicit and still falsy
        # Skip files the pipeline writes itself.
        if get_filename_from_file(filename) in ["result", "log", "match", "CurrentStock"] or filename == "cache.json":
            return False
        return True
    except Exception:
        # A malformed name (e.g. no extension) is simply not a source file.
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit pass through.
        logging.warning("check isSourceFile failed, filename=%s", filename)
        return False


def get_date_from_file(file):
    """
    :param file: file_20211111235959.xlsx
    :return:  2021-11-11
    """
    # The last 14 characters of the stem are YYYYMMDDHHMMSS.
    stamp = file.split('.')[0][-14:]
    return "%s-%s-%s" % (stamp[:4], stamp[4:6], stamp[6:8])



def get_time_from_file(file):
    """
    :param file: file_20211111235959.xlsx
    :return:  23:59:59
    """
    # The last 14 characters of the stem are YYYYMMDDHHMMSS.
    stamp = file.split('.')[0][-14:]
    return "%s:%s:%s" % (stamp[8:10], stamp[10:12], stamp[12:14])

def get_datetime_from_file(file):
    """
    :param file: file_20211111235959.xlsx
    :return: 2021-11-11 23:59:59
    """
    # Inline the date/time parsing: the stem ends in YYYYMMDDHHMMSS.
    stamp = file.split('.')[0][-14:]
    date_part = '-'.join([stamp[:4], stamp[4:6], stamp[6:8]])
    time_part = ':'.join([stamp[8:10], stamp[10:12], stamp[12:14]])
    return date_part + " " + time_part

def get_filename_from_file(file):
    """
    :param file: file_20211111235959.xlsx
    :return: file
    """
    # Drop "_YYYYMMDDHHMMSS" (15 chars) from the end of the stem.
    stem = file.split('.')[0]
    return stem[:len(stem) - 15]

def get_postfix_from_file_with_dot(file):
    """Return the file extension including the leading dot.

    :param file: file_20211111235959.xlsx
    :return: .xlsx

    Splits on the LAST dot so names containing extra dots
    (e.g. "a.b.xlsx") still yield the real extension; the previous
    `split('.')[1]` returned ".b" for such names.
    """
    return '.' + file.rsplit('.', 1)[-1]

def get_filename_from_file_with_postfix(file):
    """
    :param file: file_20211111235959.xlsx
    :return: file.xlsx
    """
    stem = get_filename_from_file(file)
    extension = get_postfix_from_file_with_dot(file)
    return stem + extension

def merge_filename(filename, date):
    """Insert a compact timestamp between a file's stem and extension.

    :param filename: e.g. "data_config.json" (may include a directory prefix)
    :param date: datetime-like; its str() form loses ' ', '-' and ':' so
        "2021-11-11 23:59:59" becomes "20211111235959"
    :return: e.g. "data_config_20211111235959.json"
    """
    # Split on the LAST dot so stems containing dots keep the real
    # extension; the previous split('.')[1] produced "a_stamp.b" for
    # "a.b.xlsx", losing the .xlsx suffix.
    prefix, postfix = filename.rsplit('.', 1)
    stamp = str(date).replace(' ', '').replace('-', '').replace(':', '')
    return prefix + '_' + stamp + '.' + postfix


def GetAllHeadersForExcel(FileName, SheetName):
    """Read only the header row of an Excel sheet and return the column
    names with leading whitespace stripped."""
    # nrows=0 loads just the header, not the data.
    headers = list(pd.read_excel(FileName, SheetName, nrows=0).columns)
    print(headers)
    return [name.lstrip() for name in headers]


def GetAllValueFromOneColumn(ws, column):
    """Collect every truthy cell value of one worksheet column, skipping
    the header row (row 1).

    :param ws: worksheet exposing .max_row and .cell(row=, column=)
    :param column: 1-based column number
    """
    values = []
    row = 2  # row 1 holds the header
    while row <= ws.max_row:
        value = ws.cell(row=row, column=column).value
        if value:
            values.append(value)
        row += 1
    return values


def getCurrentTimeStamp():
    """Return the current Unix time truncated to whole seconds."""
    now = time.time()
    return int(now)


def LoadNeedHeaders(headers_map):
    """Return the header display names (the mapping's values) in
    insertion order.

    :param headers_map: logical name -> sheet header name
    :return: list of header names
    """
    # Replaces a manual loop over .keys(); identical ordering since
    # dicts preserve insertion order.
    return list(headers_map.values())


def IsUsedByLocalCache(CacheDict, unique_code, expiry_time):
    """True when *unique_code* has a cache entry younger than
    *expiry_time* seconds — i.e. it was used recently and must be skipped.

    :param CacheDict: unique_code -> unix timestamp of last use
    :param expiry_time: max age in seconds for an entry to still count
    """
    if unique_code not in CacheDict:
        return False
    age = getCurrentTimeStamp() - int(CacheDict[unique_code])
    return age <= int(expiry_time)


def CheckIfhaveAllNeed(need_columns, all_columns):
    """True when every required column name appears in *all_columns*.

    :param need_columns: iterable of required column names
    :param all_columns: collection of available column names
    """
    # all() short-circuits on the first missing column, like the
    # original loop did.
    return all(column in all_columns for column in need_columns)


def ChangeNumToCharacter(s):
    """Map a 0-based column index to its spreadsheet letter (0 -> 'A').

    NOTE(review): only valid for indices 0-25; index 26+ would need the
    two-letter 'AA' style which this helper does not produce — confirm
    sheets never exceed 26 columns.
    """
    return chr(ord('A') + s)


def IsUniqueCodeExistedInSourceTable(key, unique_code, repo_list):
    """Return the position of *unique_code* within repo_list[key], or -1.

    :param key: "order_item_size" bucket key
    :param unique_code: deposit code to look for
    :param repo_list: dict whose values are lists of
        (unique_code, order_status, item_condition) tuples
    :return: 0-based index of the first matching tuple, or -1

    enumerate() yields the position directly; the previous
    `repo_list[key].index(i)` re-scanned the list on every hit (O(n²)).
    """
    for pos, entry in enumerate(repo_list[key]):
        if entry[0] == unique_code:
            return pos
    return -1


def getNeedColumns(all_headers, need_headers):
    """Return the column letters of the needed headers that are present,
    as a comma-separated string, e.g. "A, C, D".

    :param all_headers: ordered header names of the sheet
    :param need_headers: header names we want column letters for
    """
    letters = [ChangeNumToCharacter(all_headers.index(name))
               for name in need_headers if name in all_headers]
    # ', '.join reproduces the old str(list)-based formatting exactly.
    return ', '.join(letters)


def load_config(date):
    """Load the merge configuration as a dict.

    Prefers the dated file "files/config/data_config_<stamp>.json" when
    it exists, otherwise falls back to "data_config.json" in the working
    directory.

    :param date: datetime-like, used to build the dated config name
    :return: parsed JSON config
    """
    print('Loading config...')
    dated_name = "files/config/" + merge_filename("data_config.json", date)
    # Collapse the two previously-duplicated branches into one read path;
    # the redundant f.close() after `with` is gone too.
    config_path = dated_name if os.path.isfile(dated_name) else 'data_config.json'
    with open(config_path, 'r', encoding='utf-8') as f:
        conf = json.load(f)
    print('Success!')
    return conf


def GetColumnNumByHeaderIndex(Headers, ColumnName):
    """Translate a header name into its 1-based worksheet column number."""
    zero_based = Headers.index(ColumnName)
    return zero_based + 1


def FillColumnNameForNewFile(File, ColumnList):
    """Write *ColumnList* into row 1 of the worksheet as its header row.

    :param File: worksheet exposing .cell(row, column, value)
    :return: the same worksheet, for chaining
    """
    for col_num, header in enumerate(ColumnList, start=1):
        File.cell(1, col_num, header)
    return File


def FetchUniqueCodeFromCurrentStock(CurrentStockExcelByPanda, CacheFileName, CurrentStockMap, CacheExpireTime):
    """Collect deposit codes from the current-stock sheet, keyed for matching.

    :param CurrentStockExcelByPanda: pandas DataFrame of the stock export
    :param CacheFileName: JSON file in the CWD mapping unique_code -> unix
        timestamp of last use (may be absent)
    :param CurrentStockMap: maps logical field names to sheet column headers
    :param CacheExpireTime: seconds a cached unique_code stays "used"
    :return: (CurrentStockData, UniqueCodeList) — CurrentStockData maps
        "order_item_size" keys to lists of (unique_code, order_status,
        item_condition) tuples and tracks the total tuple count under
        "length"; UniqueCodeList holds every non-blacklisted unique_code.
    """
    CurrentStockData = {"length": 0}
    UniqueCodeList = []
    # set() gives O(1) membership tests inside the row loop.
    # NOTE(review): assumes the redis client returns str codes
    # (decode_responses=True) — bytes would never match str(unique_code);
    # verify the redis_con configuration.
    blacklist = set(get_uniquecode_blacklist())
    print('Fetching data from source table...')
    if CacheFileName in os.listdir("."):
        with open(CacheFileName, "r", encoding="utf-8") as CacheFile:
            CacheDict = json.load(CacheFile)
    else:
        CacheDict = {}
    with tqdm(total=len(CurrentStockExcelByPanda)) as pbar:
        for i in range(len(CurrentStockExcelByPanda)):
            unique_code = str(CurrentStockExcelByPanda.loc[i, CurrentStockMap["unique_code"]])
            if unique_code in blacklist:
                # BUG FIX: the %s placeholder previously had no argument,
                # so the literal "%s" was logged; pass the code lazily.
                logging.warning("code=%s is in blacklist", unique_code)
                continue
            item_id = str(CurrentStockExcelByPanda.loc[i, CurrentStockMap["item_id"]])
            # Sizes may arrive as slash-separated pairs; keep the last segment.
            item_size = str(CurrentStockExcelByPanda.loc[i, CurrentStockMap["item_size"]]).split('/')[-1]
            order_id = str(CurrentStockExcelByPanda.loc[i, CurrentStockMap["order_id"]])
            # Recently-used codes are skipped for matching but still
            # reported in UniqueCodeList.
            if not IsUsedByLocalCache(CacheDict, unique_code, CacheExpireTime):
                order_status = str(CurrentStockExcelByPanda.loc[i, CurrentStockMap["order_status"]])
                item_condition = str(CurrentStockExcelByPanda.loc[i, CurrentStockMap["item_condition"]])
                key = '%s_%s_%s' % (order_id, item_id, item_size)
                # Grades A/B/C all collapse to "defective" (瑕疵); anything else is blank.
                item_condition = '瑕疵' if item_condition in ['A级', 'B级', 'C级'] else ''
                CurrentStockData.setdefault(key, []).append((unique_code, order_status, item_condition))
                CurrentStockData["length"] += 1
            UniqueCodeList.append(unique_code)
            pbar.update(1)
    print('Success!')
    time.sleep(1)  # let tqdm finish rendering before the next print
    return CurrentStockData, UniqueCodeList


def FillDataIntoRequirementFile(RequirementFileWorkSheet, CurrentStockData, RequirementExcelByPanda,
                                RequirementAllHeaders, RequirementMap, LogFileName):
    """Fill deposit codes, order status and condition from the stock data
    into the requirement worksheet, logging every row decision.

    :param RequirementFileWorkSheet: worksheet written in place via .cell()
    :param CurrentStockData: dict from FetchUniqueCodeFromCurrentStock —
        "order_item_size" key -> list of (unique_code, order_status,
        item_condition) tuples, plus a running "length" counter
    :param RequirementExcelByPanda: pandas DataFrame view of the same sheet
    :param RequirementAllHeaders: ordered header names of the sheet
    :param RequirementMap: maps logical field names to header names
    :param LogFileName: per-row decision log, overwritten each run

    NOTE(review): cells are written at row i + 2, i.e. this assumes sheet
    row 1 is the header and DataFrame row 0 maps to sheet row 2 — confirm
    against the workbook layout.
    """
    print('Filling data into dest table...')
    with open(LogFileName, "w+", encoding="utf-8") as log:
        with tqdm(total=len(RequirementExcelByPanda)) as pbar:
            for i in range(len(RequirementExcelByPanda)):
                # Once every stock tuple is consumed ("length" hits 0),
                # remaining rows are skipped entirely.
                if CurrentStockData["length"] != 0:
                    item_id = str(RequirementExcelByPanda.loc[i, RequirementMap["item_id"]])
                    item_size = str(RequirementExcelByPanda.loc[i, RequirementMap["item_size"]])
                    order_id = str(RequirementExcelByPanda.loc[i, RequirementMap["order_id"]])
                    origin_unique_code = RequirementExcelByPanda.loc[i, RequirementMap["unique_code"]]
                    unique_code = str(origin_unique_code)
                    condition = str(RequirementExcelByPanda.loc[i, RequirementMap["item_condition"]])
                    if order_id != "" and item_id != "" and item_size != "":
                        key = "%s_%s_%s" % (order_id, item_id, item_size)
                        if (
                        not pd.isnull(origin_unique_code)) and unique_code != "nan" and key in CurrentStockData.keys():
                            # Row already has a deposit code and the key exists in
                            # the stock data: keep the original code and condition.
                            result = IsUniqueCodeExistedInSourceTable(key, unique_code, CurrentStockData)
                            if result != -1:
                                # Consume the matching stock tuple so it cannot be
                                # handed out to another row.
                                CurrentStockData[key].pop(result)
                                RequirementFileWorkSheet.cell(i + 2, GetColumnNumByHeaderIndex(RequirementAllHeaders,
                                                                                               RequirementMap[
                                                                                                   "unique_code"]),
                                                              unique_code)
                                RequirementFileWorkSheet.cell(i + 2, GetColumnNumByHeaderIndex(RequirementAllHeaders,
                                                                                               RequirementMap[
                                                                                                   "item_condition"]),
                                                              condition)
                                log.writelines(
                                    ("[%s] already had a code which existed in source, code: %s\n" % (i, unique_code)))
                        elif key not in CurrentStockData.keys() and (
                                not pd.isnull(origin_unique_code)) and unique_code != 'nan':
                            # Row has a deposit code but no matching stock key:
                            # keep the original code and condition unchanged.
                            RequirementFileWorkSheet.cell(i + 2, GetColumnNumByHeaderIndex(RequirementAllHeaders,
                                                                                           RequirementMap[
                                                                                               "unique_code"]),
                                                          unique_code)
                            RequirementFileWorkSheet.cell(i + 2, GetColumnNumByHeaderIndex(RequirementAllHeaders,
                                                                                           RequirementMap[
                                                                                               "item_condition"]),
                                                          condition)
                            log.writelines(
                                ("[%s] already had a code,but no matched data, code: %s\n" % (i, unique_code)))
                        elif key not in CurrentStockData.keys() or len(CurrentStockData[key]) == 0:
                            # Row has no deposit code and the stock data has none
                            # to give: blank out the code and status cells.
                            RequirementFileWorkSheet.cell(i + 2, GetColumnNumByHeaderIndex(RequirementAllHeaders,
                                                                                           RequirementMap[
                                                                                               "unique_code"]),
                                                          '')
                            RequirementFileWorkSheet.cell(i + 2, GetColumnNumByHeaderIndex(RequirementAllHeaders,
                                                                                           RequirementMap[
                                                                                               "order_status"]),
                                                          '')
                            log.writelines("[%s]doesn't have matched key. key: %s\n" % (i, key))
                        else:  # The stock data has a matching code for this item:
                               # fill in code, order status and condition.
                            insert_data = CurrentStockData[key].pop()
                            RequirementFileWorkSheet.cell(i + 2, GetColumnNumByHeaderIndex(RequirementAllHeaders,
                                                                                           RequirementMap[
                                                                                               "unique_code"]),
                                                          insert_data[0])
                            RequirementFileWorkSheet.cell(i + 2, GetColumnNumByHeaderIndex(RequirementAllHeaders,
                                                                                           RequirementMap[
                                                                                               "order_status"]),
                                                          insert_data[1])
                            RequirementFileWorkSheet.cell(i + 2, GetColumnNumByHeaderIndex(RequirementAllHeaders,
                                                                                           RequirementMap[
                                                                                               "item_condition"]),
                                                          insert_data[2])
                            CurrentStockData["length"] -= 1
                            log.writelines('[%s]key: %s, value: %s, status: %s, condition: %s %s keys left\n' % (
                                i, key, insert_data[0], insert_data[1], insert_data[2], CurrentStockData["length"]))
                pbar.update(1)
    # Redundant: the with-block above already closed the file; close() on a
    # closed file is a no-op.
    log.close()
    time.sleep(1)
    print('Success!')


def GenerateExportFile(RequirementFileByWorkBook,
                       ExportWorkBook,
                       RequirementAllHeaders,
                       RequirementMap,
                       AllUniqueCodeInStockFile,
                       config):
    """Filter filled requirement rows into the export workbook.

    A row is exported when its unique_code appears in the stock file, its
    (item_condition, willing) pair is listed in config["need_pairs"], and
    the code was not already exported within the last CacheExpTime
    seconds (tracked in files/cache.json, which is rewritten at the end).

    :param RequirementFileByWorkBook: workbook containing "Sheet1"
    :param ExportWorkBook: workbook to receive the filtered rows
    :param AllUniqueCodeInStockFile: codes present in the stock export
    :param config: needs "ExportColumns" and "need_pairs"
    """
    files_path = "files/"
    print("Filtering result...")
    RequirementSheet = RequirementFileByWorkBook["Sheet1"]
    # NOTE(review): a "Result" sheet is created but rows are written to the
    # FIRST sheet below — confirm which sheet is actually intended.
    ExportWorkBook.create_sheet("Result")
    ExportColumns = config["ExportColumns"]
    CacheFile = files_path + "cache.json"
    CacheExpTime = 7200
    ExportSheet = ExportWorkBook[ExportWorkBook.sheetnames[0]]
    ExportSheet = FillColumnNameForNewFile(ExportSheet, ExportColumns)
    CurrentTimeStamp = getCurrentTimeStamp()
    # BUG FIX: CacheFile includes the "files/" prefix, so the old
    # membership test against os.listdir(".") could never match and the
    # cache was never reloaded; test the path itself.
    if os.path.isfile(CacheFile):
        with open(CacheFile, "r", encoding="utf-8") as f:
            CacheDict = json.load(f)
        # Drop expired entries before reuse.
        for code in list(CacheDict.keys()):
            if CurrentTimeStamp - CacheDict[code] > CacheExpTime:
                CacheDict.pop(code)
    else:
        CacheDict = {}
    # Hoist the loop-invariant header->column lookups out of the row loop.
    code_col = GetColumnNumByHeaderIndex(RequirementAllHeaders, RequirementMap["unique_code"])
    cond_col = GetColumnNumByHeaderIndex(RequirementAllHeaders, RequirementMap["item_condition"])
    willing_col = GetColumnNumByHeaderIndex(RequirementAllHeaders, RequirementMap["willing"])
    name_col = GetColumnNumByHeaderIndex(RequirementAllHeaders, RequirementMap["item_name"])
    id_col = GetColumnNumByHeaderIndex(RequirementAllHeaders, RequirementMap["item_id"])
    size_col = GetColumnNumByHeaderIndex(RequirementAllHeaders, RequirementMap["supplier" if False else "item_size"])
    supplier_col = GetColumnNumByHeaderIndex(RequirementAllHeaders, RequirementMap["supplier"])
    n = 2  # next free row in the export sheet (row 1 is the header)
    with tqdm(total=RequirementSheet.max_row) as pbar:
        # BUG FIX: max_row is the index of the LAST populated row
        # (inclusive), so range(2, max_row) silently skipped it; iterate
        # through max_row + 1.
        for i in range(2, RequirementSheet.max_row + 1):
            unique_code = RequirementSheet.cell(i, code_col).value
            pair = [RequirementSheet.cell(i, cond_col).value,
                    RequirementSheet.cell(i, willing_col).value]
            if unique_code in AllUniqueCodeInStockFile \
                    and pair in config["need_pairs"] \
                    and not IsUsedByLocalCache(CacheDict, unique_code, CacheExpTime):
                ExportSheet.cell(n, 1, RequirementSheet.cell(i, name_col).value)
                ExportSheet.cell(n, 2, RequirementSheet.cell(i, id_col).value)
                ExportSheet.cell(n, 3, RequirementSheet.cell(i, size_col).value)
                ExportSheet.cell(n, 4, unique_code)
                ExportSheet.cell(n, 5, RequirementSheet.cell(i, supplier_col).value)
                # Anything not explicitly defective (瑕疵) exports as in-stock (现货).
                ExportSheet.cell(n, 6, "瑕疵" if str(RequirementSheet.cell(i, cond_col).value) == "瑕疵" else "现货")
                # Mark the code as used so re-runs within CacheExpTime skip it.
                CacheDict[str(unique_code)] = CurrentTimeStamp
                n += 1
            pbar.update(1)
    with open(CacheFile, 'w', encoding="utf-8") as m:
        m.write(json.dumps(CacheDict, ensure_ascii=False))
    print('Success!')


def DownloadStockFile(date):
    """Download the current-stock Excel export to
    files/stock/CurrentStock_<stamp>.xlsx.

    :param date: datetime-like, embedded in the downloaded file's name
    :raises requests.HTTPError: when the export endpoint returns an error
    """
    filepath = "files/"
    filename = merge_filename("stock/CurrentStock.xlsx", date)
    full_name = filepath + filename
    # BUG FIX: full_name contains directory components, so the old
    # membership test against os.listdir(".") could never match; check
    # the path directly so a stale file really is removed.
    if os.path.isfile(full_name):
        os.remove(full_name)
    print("Downloading file: %s" % full_name)
    url = "https://agent.api.sneakerburgers.com/api/stockservice/product/exportproductstockdetail"
    payload = "{\"status\":0,\"type\":0}"
    headers = {
      "authority": "agent.api.sneakerburgers.com",
      "accept": "application/json, text/plain, */*",
      # SECURITY: a hard-coded authorization token used to live here; it
      # was dead code (always overwritten) and leaked a credential. The
      # real token is always fetched at runtime.
      "authorization": get_login_token(),
      "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
      "os": "web",
      "content-type": "application/json;charset=UTF-8",
      "origin": "https://agent.sneakerburgers.com",
      "sec-fetch-site": "same-site",
      "sec-fetch-mode": "cors",
      "sec-fetch-dest": "empty",
      "referer": "https://agent.sneakerburgers.com/",
      "accept-language": "zh-CN,zh;q=0.9"
    }
    response = requests.request("POST", url, headers=headers, data=payload)
    # Fail loudly instead of silently writing an error body into the .xlsx.
    response.raise_for_status()
    with open(full_name, "wb") as code:
        code.write(response.content)
    print('Success!')