import base64
import json
import logging
import os
import time
from PIL import Image as Img
import openpyxl
import requests
from datetime import datetime
import xlrd
from app.commons.config import async_exec
from app.commons.redis_session import redis_con
from app.service.merge_data.functions import GetColumnNumByHeaderIndex, ChangeNumToCharacter, FillColumnNameForNewFile
from app.service.merge_data.get_token import get_login_token, get_login_token_PJ
import pandas as pd
from openpyxl.drawing.image import Image
from openpyxl.drawing.spreadsheet_drawing import AnchorMarker, OneCellAnchor
from openpyxl.drawing.xdr import XDRPositiveSize2D
from openpyxl.utils.units import pixels_to_EMU


def make_picurl_key(c):
    """Build the redis key under which picture urls for code *c* are cached."""
    return f"P-{c}"


def get_current_datetime():
    """Return the current local time as a compact 14-digit timestamp string.

    Example: "20220308162706" (YYYYMMDDHHMMSS).

    The previous implementation formatted with the locale-dependent "%X"
    directive and then stripped "-" and ":" separators; "%H%M%S" is
    locale-independent and yields the same result directly.

    :return: current timestamp as "YYYYMMDDHHMMSS".
    """
    return datetime.now().strftime("%Y%m%d%H%M%S")


def DownloadStockFile(filename, region="sh"):
    """Download the current stock export and write it to *filename*.

    Posts to the sneakerburgers stock-export endpoint and writes the raw
    response body (an .xlsx file) to *filename*, overwriting any existing
    file.

    :param filename: output path; may include directories.
    :param region: "gz" authenticates with the PJ login token, anything
        else with the default login token.
    """
    # Fix: os.path.exists handles paths that contain directories; the old
    # `filename in os.listdir(".")` check only matched bare names in the
    # current working directory, so the remove never ran for callers that
    # pass "files/.../CurrentStock.xlsx".
    if os.path.exists(filename):
        os.remove(filename)
    print("Downloading file: %s" % filename)
    url = "https://agent.api.sneakerburgers.com/api/stockservice/product/exportproductstockdetail"
    payload = "{\"status\":0,\"type\":0}"
    headers = {
        "authority": "agent.api.sneakerburgers.com",
        "accept": "application/json, text/plain, */*",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
        "os": "web",
        "content-type": "application/json;charset=UTF-8",
        "origin": "https://agent.sneakerburgers.com",
        "sec-fetch-site": "same-site",
        "sec-fetch-mode": "cors",
        "sec-fetch-dest": "empty",
        "referer": "https://agent.sneakerburgers.com/",
        "accept-language": "zh-CN,zh;q=0.9"
    }
    if region == "gz":
        headers["authorization"] = get_login_token_PJ()
    else:
        headers["authorization"] = get_login_token()
    # Timeout guards against the export endpoint hanging indefinitely;
    # other calls in this module already use explicit timeouts.
    response = requests.request("POST", url, headers=headers, data=payload, timeout=60)
    with open(filename, "wb") as code:
        code.write(response.content)
    print('Success!')


def DownloadPicture(url):
    """Download one product image.

    :param url: image url; an empty string returns None immediately.
    :return: raw response bytes, or None on timeout / connection failure.
    """
    if url == "":
        return None
    payload = {}
    headers = {
        'authority': 'files.sneakerburgers.com',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36',
        'accept': 'image/avif,image/webp,image/apng,image/*,*/*;q=0.8',
        'sec-fetch-site': 'same-site',
        'sec-fetch-mode': 'no-cors',
        'sec-fetch-dest': 'image',
        'referer': 'https://agent.sneakerburgers.com/',
        'accept-language': 'zh-CN,zh;q=0.9'
    }
    from requests.exceptions import ConnectionError, Timeout
    try:
        response = requests.request("GET", url, headers=headers, data=payload, timeout=5)
    except Timeout:
        # Fix: Timeout is the common base of ReadTimeout and ConnectTimeout;
        # the old code only caught ReadTimeout explicitly.
        logging.warning(
            "urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='files.sneakerburgers.com', port=443): Read timed out.")
        return None
    except ConnectionError:
        logging.warning("Connection Error: (host='files.sneakerburgers.com', port=443)")
        return None
    return response.content


def GetPictureUrl(unique_code, region="sh"):
    """Resolve the defect-picture urls for a coupon/deposit code.

    Checks redis first (key "P-<code>", urls joined by "|"); on a miss it
    chains three backend calls (stockproduct -> detailbysku ->
    stockproductitem) to obtain "picurls", caches the result in redis for
    7 days, and returns it.

    :param unique_code: the coupon/deposit code (寄存码 column value).
    :param region: "gz" authenticates with the PJ login token, otherwise
        the default login token.
    :return: (urls, cache_flag) where urls is a list of picture urls and
        cache_flag is "true" for a redis hit, "false" for a fresh API hit,
        "" when no urls exist or an error occurred.
    """
    logging.warning("start getting picture url")
    r = redis_con()
    key = make_picurl_key(unique_code)
    picurl = r.get(key)
    if picurl:
        # NOTE(review): assumes the redis client decodes responses to str;
        # with a raw-bytes client .split("|") would fail — confirm config.
        logging.warning("[read from redis] key: %s, value: %s" % (key, picurl))
        return picurl.split("|"), "true"
    url1 = "https://agent.api.sneakerburgers.com/api/stockservice/product/stockproduct"
    url2 = "https://agent.api.sneakerburgers.com/api/stockservice/product/stockproductitem"
    url3 = "https://agent.api.sneakerburgers.com/api/stockservice/product/detailbysku"
    payload = "{\"couponid\":\"%s\",\"page\":1,\"size\":10,\"status\":0,\"type\":0}" % (unique_code)
    headers = {
        'authority': 'agent.api.sneakerburgers.com',
        'accept': 'application/json, text/plain, */*',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36',
        'os': 'web',
        'content-type': 'application/json;charset=UTF-8',
        'origin': 'https://agent.sneakerburgers.com',
        'sec-fetch-site': 'same-site',
        'sec-fetch-mode': 'cors',
        'sec-fetch-dest': 'empty',
        'referer': 'https://agent.sneakerburgers.com/',
        'accept-language': 'zh-CN,zh;q=0.9'
    }
    if region == "gz":
        headers["authorization"] = get_login_token_PJ()
    else:
        headers["authorization"] = get_login_token()
    logging.warning("headers: %s" % str(json.dumps(headers)))
    try:
        # Step 1: coupon id -> sku.
        logging.warning("[API] %s | req: " % (url1) + payload)
        response1 = requests.request("POST", url1, headers=headers, data=payload.encode("utf-8").decode("latin1"), timeout=20)
        logging.warning("[API] %s | " % (url1) +
                        "resp: " + str(response1.text))
        sku = response1.json()["data"]["list"][0]["sku"]
        # Step 2: sku -> productid.
        payload3 = "{\"sku\":\"%s\",\"couponid\":\"%s\",\"status\":0,\"type\":0}" % (sku, unique_code)
        logging.warning("[API] %s | req: " % (url3) + payload3)
        response3 = requests.request("POST", url3, headers=headers, data=payload3.encode("utf-8").decode("latin1"), timeout=20)
        logging.warning("[API] %s | " % (url3) +
                        "resp: " + str(response3.text))
        productid = response3.json()["data"][0]["productid"]
        # Step 3: productid -> "|"-joined picture urls.
        payload2 = "{\"couponid\":\"%s\",\"page\":1,\"size\":10,\"status\":0,\"type\":0,\"productid\":%s}" % (
            unique_code, productid)
        logging.warning(
            "[API] %s | req: " % (url2) + payload2)
        response2 = requests.request("POST", url2, headers=headers, data=payload2.encode("utf-8").decode("latin1"), timeout=20)
        picurl = response2.json()["data"]["list"][0]["picurls"]
        logging.warning(
            "[API] %s | " % (url2) +
            "resp: " + str(response2.text))
        if isinstance(picurl, str) and picurl != "":
            r.set(key, picurl)
            r.expire(key, 86400 * 7)
            return picurl.split("|"), "false"
        # Fix: a non-string picurl (e.g. None from the API) previously fell
        # out of the try without a return, so the function yielded None and
        # callers crashed on len(urls). Always return the empty tuple here.
        return [], ""
    except Exception:
        logging.warning(
            "[ERROR] GetPicture Err | coupon_id: %s" % unique_code)
        return [], ""


def FilterWithMerchant(StockExcel, merchant):
    """Return only the rows whose 供应商 (merchant) column equals *merchant*."""
    merchant_mask = StockExcel["供应商"] == merchant
    return StockExcel[merchant_mask]


def getColValues(ws, column):
    """Return the values of every cell in *column*, rows 1 through ws.max_row."""
    return [
        ws.cell(row=r, column=column).value
        for r in range(1, ws.max_row + 1)
    ]


def getRowValues(ws, row):
    """Return the values of every cell in *row*, columns 1 through ws.max_column."""
    return [
        ws.cell(row=row, column=c).value
        for c in range(1, ws.max_column + 1)
    ]


def FilterWithMerchantV2(StockExcel, merchant):
    """V2 filter: intentionally a no-op — returns *StockExcel* unfiltered.

    The *merchant* argument is accepted for signature compatibility with
    FilterWithMerchant but ignored.
    """
    return StockExcel


def offset_img(img, col, row):
    """Anchor *img* to a single cell at (col, row), both 0-indexed.

    Builds a OneCellAnchor sized to the image's pixel dimensions (converted
    to EMU) with a fixed horizontal offset of 60000 EMU to nudge the picture
    inside the cell.
    """
    width_emu = pixels_to_EMU(img.width)
    height_emu = pixels_to_EMU(img.height)
    extent = XDRPositiveSize2D(width_emu, height_emu)
    anchor_from = AnchorMarker(col=col, colOff=60000, row=row, rowOff=0)
    img.anchor = OneCellAnchor(_from=anchor_from, ext=extent)


@async_exec
def match_picture(userid="test", merchant_list=None,region="sh"):
    """For each merchant, export an .xlsx of its stock rows with defect photos.

    Downloads the current stock file, then for every row whose 库存类型
    ("stock type") is 瑕疵 ("defective") resolves the product's picture urls
    via GetPictureUrl (redis-cached), downloads the images into
    files/product_img/, and embeds them into newly inserted 瑕疵图
    ("defect picture") columns. One workbook is saved per merchant under
    files/stock_with_pic/.

    Runs asynchronously via @async_exec.

    :param userid: subdirectory name for the output files.
    :param merchant_list: merchant names (供应商 column values) to export;
        None or ["test"] falls back to a hard-coded default merchant.
    :param region: "sh" (default) or another region code; non-"sh" adds a
        region level to the output path and selects the login token.
    """
    logging.warning("match picture for %s, merchant_list:%s,region: %s" % (userid,merchant_list,region))
    path = "files/stock_with_pic/%s/" % str(userid)
    if region!="sh":
        # Non-default regions get their own subtree under stock_with_pic.
        path = "files/stock_with_pic/%s/%s/" % (region,str(userid))
    if not os.path.exists(path):
        os.makedirs(path)
    if merchant_list is None:
        merchant_list = ["郑浩滨"]
    if merchant_list == ["test"]:
        merchant_list = ["郑浩滨"]
    CurrentStockFileName = path + "CurrentStock.xlsx"
    DownloadStockFile(CurrentStockFileName,region)
    CurrentStock = openpyxl.load_workbook(CurrentStockFileName)
    CurrentSheet = CurrentStock["Sheet1"]
    # Deduplicate so each merchant's workbook is produced only once.
    merchant_list = set(merchant_list)
    for merchant in merchant_list:
        ExportWorkBook = openpyxl.Workbook()
        ExportWS = ExportWorkBook["Sheet"]
        rows = CurrentSheet.max_row
        headers = getRowValues(CurrentSheet, 1)
        ExportWS = FillColumnNameForNewFile(ExportWS, headers)
        # Locate key columns by header text (1-based indexes):
        # 库存类型 = stock type, 供应商 = merchant, 寄存码 = coupon/deposit code.
        defect_idx = len(headers)
        for i in range(len(headers)):
            if "库存类型" == headers[i]:
                defect_idx = i + 1
            if "供应商" == headers[i]:
                merchant_idx = i + 1
            if "寄存码" == headers[i]:
                coupon_idx = i + 1
        start_rows = 2
        code_url_map = {}  # coupon code -> list of picture urls
        max_length = 0     # widest url list seen; how many 瑕疵图 columns to insert
        for i in range(2, rows + 1):
            if CurrentSheet.cell(i, merchant_idx).value == merchant:
                defect_row = getRowValues(CurrentSheet, i)
                if CurrentSheet.cell(i, defect_idx).value == "瑕疵":
                    defect_code = str(CurrentSheet.cell(i, coupon_idx).value)
                    urls, is_from_cache = GetPictureUrl(defect_code,region)
                    if len(urls) > max_length:
                        max_length = len(urls)
                    code_url_map[defect_code] = urls
                    if is_from_cache == "false":
                        # Fresh API lookup -> throttle to avoid hammering the backend.
                        time.sleep(0.3)
                # NOTE(review): this inner loop reuses the outer loop variable
                # `i`. Harmless here (the outer `for` rebinds `i` on its next
                # iteration), but fragile — worth renaming eventually.
                for i in range(len(defect_row)):
                    ExportWS.cell(start_rows, i + 1, defect_row[i])
                start_rows += 1
        # Make room for the widest picture set right after the 库存类型 column.
        ExportWS.insert_cols(idx=defect_idx + 1, amount=max_length)
        headers = getRowValues(ExportWS, 1)
        for i in range(len(headers)):
            if headers[i] is None:
                headers[i] = "瑕疵图"
        ExportWS = FillColumnNameForNewFile(ExportWS, headers)
        rows = ExportWS.max_row
        # Re-locate the key columns: the insert shifted everything right.
        # defect_pic_idx_head is the 0-based index of the first 瑕疵图 column.
        defect_pic_idx_head = 0
        for i in range(len(headers)):
            if "库存类型" == headers[i]:
                defect_idx = i + 1
            if "寄存码" == headers[i]:
                coupon_idx = i + 1
            if headers[i] == "瑕疵图" and defect_pic_idx_head == 0:
                defect_pic_idx_head = i
        for i in range(2, rows + 1):
            if ExportWS.cell(i, defect_idx).value == "瑕疵":
                defect_pic_idx = defect_pic_idx_head
                coupon_code = str(ExportWS.cell(i, coupon_idx).value)
                try:
                    urls = code_url_map[coupon_code]
                except:
                    # Missing entry -> mark the row with the "API timeout" placeholder.
                    urls = ["接口超时"]
                for url in urls:
                    if url == "接口超时":
                        ExportWS.cell(i, coupon_idx, "接口超时")
                    else:
                        # Images are cached on disk (converted to jpeg) and
                        # reused across runs.
                        product_img_dir = "files/product_img/"
                        imgs_from_disk = os.listdir(product_img_dir)
                        img_name_from_url = url.split("/")[-1]
                        img_name_from_url = img_name_from_url.split(".")[0] + ".jpeg"
                        pic_path = product_img_dir + img_name_from_url
                        if img_name_from_url not in imgs_from_disk and len(url) > 0:
                            img_content = DownloadPicture(url)
                            if img_content is None:
                                ExportWS.cell(i, defect_pic_idx_head + 1, "timeout")
                                continue
                            with open(pic_path, 'wb') as f:
                                f.write(img_content)
                            # Normalize to RGB jpeg so openpyxl can embed it.
                            k = Img.open(pic_path).convert("RGB")
                            k.save(pic_path, "jpeg")
                        elif len(url) == 0:
                            continue
                        image = Image(pic_path)
                        ExportWS.row_dimensions[i].height = 25
                        image.width, image.height = (65, 27)
                        # offset_img expects 0-based col/row indexes.
                        offset_img(image, defect_pic_idx, i - 1)
                        ExportWS.add_image(image)
                        logging.warning("coupon_code: %s, url: %s" % (coupon_code, url))
                        defect_pic_idx += 1
        merchant_with_id = merchant + '_' + get_current_datetime() + ".xlsx"
        path = "files/stock_with_pic/%s/" % str(userid)
        if region!="sh":
            path = "files/stock_with_pic/%s/%s/" % (region,str(userid))
        result_path = path + merchant_with_id
        ExportWorkBook.save(result_path)
        logging.warning("file output success! filename:%s" % (result_path))
        print("done! %s" % (result_path))


@async_exec
def match_picture_with_sku(userid="test", sku=None, region="sh"):
    """Export an .xlsx of the stock rows for *sku*, embedding defect photos.

    Mirrors match_picture() but filters rows by 货号 (article number)
    instead of 供应商 (merchant). Rows whose 库存类型 is 瑕疵 ("defective")
    get their picture urls resolved via GetPictureUrl, images are downloaded
    to files/product_img/ and embedded into inserted 瑕疵图 columns.

    Runs asynchronously via @async_exec.

    :param userid: subdirectory name for the output files.
    :param sku: article number to export; None falls back to a sample sku.
    :param region: "sh" (default) or another region code; non-"sh" adds a
        region level to the output path and selects the login token.
    """
    path = "files/stock_with_pic/%s/" % str(userid)
    if region != "sh":
        path = "files/stock_with_pic/%s/%s/" % (region, str(userid))
    if not os.path.exists(path):
        os.makedirs(path)
    if sku is None:
        sku = "DJ4625-100"
    CurrentStockFileName = path + "CurrentStock.xlsx"
    # Fix: pass region through so non-"sh" regions download with the correct
    # login token (match_picture already did this; here it was omitted).
    DownloadStockFile(CurrentStockFileName, region)
    CurrentStock = openpyxl.load_workbook(CurrentStockFileName)
    CurrentSheet = CurrentStock["Sheet1"]
    ExportWorkBook = openpyxl.Workbook()
    ExportWS = ExportWorkBook["Sheet"]
    rows = CurrentSheet.max_row
    headers = getRowValues(CurrentSheet, 1)
    ExportWS = FillColumnNameForNewFile(ExportWS, headers)
    # Locate key columns by header text (1-based indexes):
    # 库存类型 = stock type, 货号 = sku, 寄存码 = coupon/deposit code.
    defect_idx = len(headers)
    for i in range(len(headers)):
        if "库存类型" == headers[i]:
            defect_idx = i + 1
        if "货号" == headers[i]:
            sku_idx = i + 1
        if "寄存码" == headers[i]:
            coupon_idx = i + 1
    start_rows = 2
    code_url_map = {}  # coupon code -> list of picture urls
    max_length = 0     # widest url list seen; how many 瑕疵图 columns to insert
    for i in range(2, rows + 1):
        if str(CurrentSheet.cell(i, sku_idx).value) == sku:
            defect_row = getRowValues(CurrentSheet, i)
            if CurrentSheet.cell(i, defect_idx).value == "瑕疵":
                defect_code = str(CurrentSheet.cell(i, coupon_idx).value)
                urls, is_from_cache = GetPictureUrl(defect_code, region)
                logging.warning("coupon code: %s, urls: %s" % (defect_code, urls))
                if len(urls) > max_length:
                    max_length = len(urls)
                code_url_map[defect_code] = urls
                if is_from_cache == "false":
                    # Fresh API lookup -> throttle to avoid hammering the backend.
                    time.sleep(0.3)
            # Fix: the original reused the outer loop variable `i` here,
            # shadowing the row index; `col` keeps the two loops independent.
            for col in range(len(defect_row)):
                ExportWS.cell(start_rows, col + 1, defect_row[col])
            start_rows += 1
    # Make room for the widest picture set right after the 库存类型 column.
    ExportWS.insert_cols(idx=defect_idx + 1, amount=max_length)
    headers = getRowValues(ExportWS, 1)
    for i in range(len(headers)):
        if headers[i] is None:
            headers[i] = "瑕疵图"
    ExportWS = FillColumnNameForNewFile(ExportWS, headers)
    rows = ExportWS.max_row
    # Re-locate the key columns: the insert shifted everything right.
    # defect_pic_idx_head is the 0-based index of the first 瑕疵图 column.
    defect_pic_idx_head = 0
    for i in range(len(headers)):
        if "库存类型" == headers[i]:
            defect_idx = i + 1
        if "寄存码" == headers[i]:
            coupon_idx = i + 1
        if headers[i] == "瑕疵图" and defect_pic_idx_head == 0:
            defect_pic_idx_head = i
    for i in range(2, rows + 1):
        if ExportWS.cell(i, defect_idx).value == "瑕疵":
            defect_pic_idx = defect_pic_idx_head
            coupon_code = str(ExportWS.cell(i, coupon_idx).value)
            # Fix: dict .get replaces the bare `except:` around the lookup;
            # a missing entry gets the "API timeout" placeholder as before.
            urls = code_url_map.get(coupon_code, ["接口超时"])
            for url in urls:
                if url == "接口超时":
                    ExportWS.cell(i, coupon_idx, "接口超时")
                else:
                    # Images are cached on disk (converted to jpeg) and
                    # reused across runs.
                    product_img_dir = "files/product_img/"
                    imgs_from_disk = os.listdir(product_img_dir)
                    img_name_from_url = url.split("/")[-1]
                    img_name_from_url = img_name_from_url.split(".")[0] + ".jpeg"
                    pic_path = product_img_dir + img_name_from_url
                    if img_name_from_url not in imgs_from_disk and len(url) > 0:
                        img_content = DownloadPicture(url)
                        if img_content is None:
                            ExportWS.cell(i, defect_pic_idx_head + 1, "timeout")
                            continue
                        with open(pic_path, 'wb') as f:
                            f.write(img_content)
                        # Normalize to RGB jpeg so openpyxl can embed it.
                        k = Img.open(pic_path).convert("RGB")
                        k.save(pic_path, "jpeg")
                    elif len(url) == 0:
                        continue
                    image = Image(pic_path)
                    ExportWS.row_dimensions[i].height = 25
                    image.width, image.height = (65, 27)
                    # offset_img expects 0-based col/row indexes.
                    offset_img(image, defect_pic_idx, i - 1)
                    ExportWS.add_image(image)
                    logging.warning("coupon_code: %s, url: %s" % (coupon_code, url))
                    defect_pic_idx += 1
    merchant_with_id = sku + '_' + get_current_datetime() + ".xlsx"
    path = "files/stock_with_pic/%s/" % str(userid)
    if region != "sh":
        path = "files/stock_with_pic/%s/%s/" % (region, str(userid))
    result_path = path + merchant_with_id
    ExportWorkBook.save(result_path)
    logging.warning("file output success! filename:%s" % (result_path))
    print("done! %s" % (result_path))

def test_GetPictureUrl(unique_code,region="sh"):
    """Debug variant of GetPictureUrl: same three-call API chain, but redis
    caching is commented out and the try/except removed so failures raise.

    WARNING: for region "gz" this embeds a hard-coded authorization token —
    it must not be used in production paths.

    :param unique_code: the coupon/deposit code (寄存码 column value).
    :param region: "gz" uses the hard-coded token, otherwise the default
        login token.
    :return: (urls, "false") on success; falls through returning None when
        picurls is missing or empty.
    """
    logging.warning("start getting picture url")
    # r = redis_con()
    # key = make_picurl_key(unique_code)
    # picurl = r.get(key)
    if False:
        pass
        # logging.warning("[read from redis] key: %s, value: %s" % (key, picurl))
        # return picurl.split("|"), "true"
    else:
        url1 = "https://agent.api.sneakerburgers.com/api/stockservice/product/stockproduct"
        url2 = "https://agent.api.sneakerburgers.com/api/stockservice/product/stockproductitem"
        url3 = "https://agent.api.sneakerburgers.com/api/stockservice/product/detailbysku"
        payload = "{\"couponid\":\"%s\",\"page\":1,\"size\":10,\"status\":0,\"type\":0}" % (unique_code)
        headers = {
            'authority': 'agent.api.sneakerburgers.com',
            'accept': 'application/json, text/plain, */*',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36',
            'os': 'web',
            'content-type': 'application/json;charset=UTF-8',
            'origin': 'https://agent.sneakerburgers.com',
            'sec-fetch-site': 'same-site',
            'sec-fetch-mode': 'cors',
            'sec-fetch-dest': 'empty',
            'referer': 'https://agent.sneakerburgers.com/',
            'accept-language': 'zh-CN,zh;q=0.9'
        }
        if region=="gz":
            # HACK: hard-coded token for manual testing only.
            headers["authorization"] = "eyJhZ2VudGlkIjo3MzYsImlkIjo3OTcsInJlYWxuYW1lIjoi5r2Y5a626KGMIiwicm9sZW1zZyI6Iui/kOiQpSIsInVzZXJuYW1lIjoiUEoiLCJ1c2VydHlwZSI6MSwid2FyZWhvdXNlaWQiOjB9"
        else:
            headers["authorization"] = get_login_token()
        logging.warning("headers: %s"%str(json.dumps(headers)))
        # Step 1: coupon id -> sku.
        logging.warning("[API] %s | req: "%(url1) + payload)
        response1 = requests.request("POST", url1, headers=headers, data=payload, timeout=20)
        logging.warning("[API] %s | "%(url1) +
                        "resp: " + str(response1.text))
        # try:
            # productid = response1.json()["data"]["list"][0]["productid"]
        sku = response1.json()["data"]["list"][0]["sku"]
        # Step 2: sku -> productid.
        payload3= "{\"sku\":\"%s\",\"couponid\":\"%s\",\"status\":0,\"type\":0}"%(sku,unique_code)
        logging.warning("[API] %s | req: "%(url3) + payload3)
        response3 = requests.request("POST", url3, headers=headers, data=payload3, timeout=20)
        logging.warning("[API] %s | "%(url3) +
                    "resp: " + str(response3.text))
        productid = response3.json()["data"][0]["productid"]
        # Step 3: productid -> "|"-joined picture urls.
        payload2 = "{\"couponid\":\"%s\",\"page\":1,\"size\":10,\"status\":0,\"type\":0,\"productid\":%s}" % (
            unique_code, productid)
        logging.warning(
            "[API] %s | req: "%(url2) + payload2)
        response2 = requests.request("POST", url2, headers=headers, data=payload2, timeout=20)
        picurl = response2.json()["data"]["list"][0]["picurls"]
        logging.warning(
            "[API] %s | "%(url2) +
            "resp: " + str(response2.text))
        if isinstance(picurl, str):
            if picurl != "":
                # r.set(key, picurl)
                # r.expire(key, 86400 * 7)
                return picurl.split("|"), "false"
    # except:
    #     logging.warning(
    #         "[ERROR] GetPicture Err | coupon_id: %s"%unique_code)
    #     return [], ""

if __name__ == '__main__':
    # Manual smoke test: fetch picture urls for one coupon id in region "gz".
    test_GetPictureUrl(59135519868,"gz")
