import os
import re
from contextlib import closing

import aiofiles
import pymysql
from fastapi import APIRouter
from re_common.baselibrary.database.mysql import Mysql
from re_common.baselibrary.tools.all_requests.aiohttp_request import AioHttpRequest
from re_common.baselibrary.tools.all_requests.mrequest import MRequest
from re_common.baselibrary.tools.myparsel import MParsel
from re_common.baselibrary.utils.basedir import BaseDir
from re_common.baselibrary.utils.baseencode import BaseEncode
from re_common.baselibrary.utils.basefile import BaseFile
from re_common.baselibrary.utils.core.mlamada import bools_string

from apps.allsubdb.pdf_soopat.models import PdfSoopatLogin, headers, PdfSoopatDownPdf
from apps.core.m_route import ContextIncludedRoute
from apps.core.return_info import InputInfoModel, ReturnInfo
from apps.crawler_platform.util.requestapihelper import RequestApiHelper

# Router for the pdf_soopat crawler endpoints; ContextIncludedRoute presumably
# injects per-request context handling — see apps.core.m_route for details.
router = APIRouter(route_class=ContextIncludedRoute)


@router.post("/step1/login")
async def login(input: InputInfoModel[PdfSoopatLogin]):
    """Log in to soopat and return the session cookie.

    Flow: GET the login page to scrape the hidden FORMHASH token, then POST
    the credentials with redirects disabled so the session cookie can be read
    directly off the login response.

    :param input: request wrapper carrying ``proxy`` plus
        ``data.username`` / ``data.password`` / ``data.marks``.
    :return: ``ReturnInfo`` dict; ``data`` holds ``{"cookie": dict | None}``
        (``None`` when any step failed).
    """
    proxy = input.proxy
    username = input.data.username
    password = input.data.password
    marks = input.data.marks
    url = r"http://t.soopat.com/index.php?mod=login"
    this_header = headers.copy()
    return_info = ReturnInfo()
    cookie = None
    rrq = RequestApiHelper.get_rrq()
    kwargs = {
        "rrq": rrq,
        "header": this_header,
        "url": url,
        "timeout": 30,
        "marks": marks,
        "proxy": proxy,
        "moths": MRequest.GET,
        "middler_list": [rrq.status_code_middlerwares, rrq.marks_middlerwares]
    }
    bools, dicts = await RequestApiHelper.etl_remote_meta(**kwargs)
    if bools:
        xpath_dicts = {"FORMHASH": "//input[@name='FORMHASH']/@value"}
        mc = MParsel(html=rrq.html)
        tmp = mc.xpath_parsel(sel=mc.sel, xpath_selector=xpath_dicts)
        formhash_values = tmp.get("FORMHASH") or []
        if not formhash_values:
            # The login page lacked the hidden token (layout change or block
            # page); fail fast instead of raising IndexError below.
            bools = False
            dicts["msg"] = "未解析到FORMHASH"
        else:
            data = {
                "username": username,
                "password": password,
                "loginType": "share",
                "FORMHASH": formhash_values[0],
                "return_url": "http://www.soopat.com/Home/Index",
            }
            url = r"http://t.soopat.com/index.php?mod=login&code=dologin"
            rrq = AioHttpRequest()
            # Do not follow the post-login redirect: the Set-Cookie headers we
            # need are on this response, not on the redirect target.
            rrq.allow_redirects = False
            rrq.set_url(url).set_allow_resp_text(False).set_header(this_header) \
                .set_data(data) \
                .set_timeout(30).set_marks(marks) \
                .set_proxy(proxy) \
                .set_middler_list([])
            bools, dicts = await rrq.run(MRequest.POST)
            if bools:
                # resp.cookies maps name -> Morsel; keep name -> encoded value.
                cookie = {morsel.key: morsel.coded_value
                          for morsel in rrq.resp.cookies.values()}
    return_info.status = bools_string(bools)
    return_info.msg_code = dicts["code"]
    return_info.msg = dicts["msg"]
    return_info.data = {"cookie": cookie}
    return return_info.todict()


@router.post("/step2/download_pdf")
async def download_pdf(input: InputInfoModel[PdfSoopatDownPdf]):
    """Download a patent PDF by application number.

    Flow: POST the download-choice page, scrape the download link from it,
    request that link expecting a 302 to the file server, then stream the PDF
    from the Location URL to a network share. Downloads of 15KB or less are
    discarded as failures (presumably error pages saved as .pdf).

    :param input: request wrapper carrying ``proxy`` plus ``data.appnum``
        (e.g. "CN12345678.X"), ``data.cookie`` (from /step1/login) and
        ``data.marks``.
    :return: ``ReturnInfo`` dict; ``data`` holds ``{"info": dict | None}``
        with appnum/soonum/savepath on success.
    :raises ValueError: if ``appnum`` contains no "." (from ``str.index``).
    """
    proxy = input.proxy
    appnum = input.data.appnum
    cookie = input.data.cookie
    marks = input.data.marks
    # soopat keys on the digits before the dot, without the "CN" prefix.
    soonum = appnum[:appnum.index(".")].replace("CN", "")
    url = r"http://www.soopat.com/Home/DownloadChoice/{}".format(soonum)
    this_header = headers.copy()
    return_info = ReturnInfo()
    rrq = RequestApiHelper.get_rrq()
    kwargs = {
        "rrq": rrq,
        "header": this_header,
        "url": url,
        "timeout": 30,
        "marks": marks,
        "cookies": cookie,
        "proxy": proxy,
        "moths": MRequest.POST,
        "middler_list": [rrq.status_code_middlerwares, rrq.marks_middlerwares]
    }
    bools, dicts = await RequestApiHelper.etl_remote_meta(**kwargs)
    info = None
    if bools:
        xpath_dicts = {"downurl": "//table[1]/tr[3]/td/a/@href"}
        mc = MParsel(html=rrq.html)
        tmp = mc.xpath_parsel(sel=mc.sel, xpath_selector=xpath_dicts)
        downurls = tmp.get("downurl") or []
        if not downurls:
            # No download link on the choice page (layout change, captcha or
            # login wall); fail fast instead of raising IndexError.
            bools = False
            dicts["msg"] = "未解析到下载链接"
        else:
            url = "http://www.soopat.com" + downurls[0]

            async def resp_hook(self, resp):
                # Only capture the body on 200 so a 302 response keeps its
                # Location header without consuming the stream.
                if resp.status == 200:
                    if self.resp_encoding is not None:
                        self.html = await resp.text(encoding=self.resp_encoding, errors=self.resp_errors)
                    else:
                        self.html = await resp.text(errors=self.resp_errors)

            rrq = AioHttpRequest()
            rrq.set_url(url).set_resp_hook(resp_hook) \
                .set_allow_redirects(False) \
                .set_allow_resp_text(False) \
                .set_cookies(cookie) \
                .set_header(this_header) \
                .set_timeout(30).set_marks(marks) \
                .set_proxy(proxy) \
                .set_middler_list([])
            bools, dicts = await rrq.run(MRequest.GET)
            if bools and rrq.resp.status == 302:
                pdfurl = rrq.resp.headers["Location"]
                # Hard-coded file-server host — TODO(review): confirm this is
                # still the host the 302 points at; move to config if possible.
                this_header["Host"] = "122.97.232.170:1522"

                async def resp_hook2(self, resp):
                    if resp.headers['Content-Type'] == "application/pdf":
                        full_path = self.full_path
                        # NOTE(review): synchronous open/write blocks the event
                        # loop while streaming; acceptable for now.
                        with open(full_path, 'wb') as fd:
                            while True:
                                data = await resp.content.read(1024)
                                if not data:
                                    break
                                fd.write(data)

                rrq = AioHttpRequest()
                rrq.set_url(pdfurl).set_cookies(cookie).set_header(this_header) \
                    .set_resp_hook(resp_hook2) \
                    .set_timeout(30) \
                    .set_allow_resp_text(False) \
                    .set_proxy(proxy) \
                    .set_middler_list(
                    [rrq.status_code_middlerwares])
                # Hard-coded share path — TODO(review): move to configuration.
                full_path = r"\\192.168.31.123\home\cjvip\qinym\soopat\pdf\{}".format(appnum + ".pdf")
                rrq.full_path = full_path
                bools, dicts = await rrq.run(MRequest.GET)
                if bools and os.path.exists(full_path):
                    filesize = os.path.getsize(full_path)
                    if filesize <= 15 * 1024:
                        # Tiny files are presumed to be error pages, not PDFs.
                        os.remove(full_path)
                        dicts["msg"] = "下载文件大小小于15KB"
                    else:
                        info = {}
                        info["appnum"] = appnum
                        info["soonum"] = soonum
                        info["savepath"] = full_path
            else:
                # resp_hook only sets html on status 200, so guard against
                # None before substring checks (previously a TypeError).
                page = rrq.html or ""
                if "依次点击图片上" in page:
                    dicts["msg"] = "出现点击图片验证码"
                elif 'type="password"' in page:
                    dicts["msg"] = "重新登录"
                else:
                    dicts["msg"] = "未知异常"
    return_info.status = bools_string(bools)
    return_info.msg_code = dicts["code"]
    return_info.msg = dicts["msg"]
    return_info.data = {"info": info}
    return return_info.todict()


# @router.get("/step3/download_image")
# async def download_image(proxy=None):
#     url = r"http://www.soopat.com/ValidateImage"
#     this_header = headers.copy()
#     return_info = ReturnInfo()
#     info = None
#
#     async def resp_hook(self, resp):
#         if resp.headers['Content-Type'] == "image/gif":
#             content = await resp.content.read()
#             fname = BaseEncode.get_byte_md5_value(content)
#             save_path = BaseFile.get_new_path(r"\\192.168.31.123\home\cjvip\qinym\soopat\image", fname[0:2])
#             BaseDir.create_dir(save_path)
#             image_path = BaseFile.get_new_path(save_path,fname + ".gif")
#             async with aiofiles.open(image_path, 'wb') as f:
#                 await f.write(content)
#             self.imageInfo = {
#                 "fname": fname,
#                 "image_path": image_path
#             }
#
#     rrq = AioHttpRequest()
#     rrq.set_url(url).set_header(this_header) \
#         .set_resp_hook(resp_hook) \
#         .set_timeout(30) \
#         .set_allow_resp_text(False) \
#         .set_proxy(proxy) \
#         .set_middler_list(
#         [rrq.status_code_middlerwares])
#     bools, dicts = await rrq.run(MRequest.GET)
#     if bools:
#         info = rrq.imageInfo
#         sql = "INSERT INTO soopat_image (imgmd5,save_path,repeat_cnt) VALUES ('{}','{}',{}) " \
#               "ON DUPLICATE KEY UPDATE repeat_cnt=repeat_cnt+1; ".format(info["fname"],
#                                                                          Mysql.escape(info["image_path"]), 1)
#         await down_product_conn.execute(sql)
#     return_info.status = bools_string(bools)
#     return_info.msg_code = dicts["code"]
#     return_info.msg = dicts["msg"]
#     return_info.data = {"info": info}
#     return return_info.todict()
