from bs4 import BeautifulSoup
from scrapy.http import Request
from crawler.base_spider import Account, BaseSpider
from constants import Kvbin
import scrapy
import random
import re
import os
from scrapy.http import Request
from extentions.kvbin.encrypt import encrypt
from urllib.parse import urlencode, quote
from typing import Optional
from utils.files import mkdir


class KvbinSpider(BaseSpider):
    """Spider that logs in to the Kvbin site.

    The login flow is a chain of callbacks:
    1. POST for the password encrypt key and login request id,
    2. POST for a turing (captcha session) id,
    3. download the captcha image, OCR it, and POST the login form,
    4. inspect the response and retry the captcha on "Invalid Captcha.".
    """

    name = Kvbin.NAME
    start_urls = [Kvbin.BASE_URL]
    # Login request id scraped from the encrypt-key response (step 1).
    req_id: Optional[str] = None
    # Key used to encrypt the account password before submitting (step 1).
    encrypt_key: Optional[str] = None
    # Captcha session id scraped from the turing-session response (step 2).
    turing_id: Optional[str] = None
    # OCR'd captcha text, upper-cased, submitted with the login form (step 3).
    captcha_code: Optional[str] = None
    # Number of captcha retries performed so far (compared to max_retry_count).
    retry_count: int = 0
    # URL of the captcha image; used to re-fetch the image on retry.
    turing_captcha_image_url: Optional[str] = None
    # Per-account working directory under self.data_dir (set in __init__).
    account_data_dir: Optional[str] = None
    # Where the downloaded captcha image is written (set in __init__).
    captcha_image_path: Optional[str] = None
    def __init__(self, account: Account):
        """Bind the spider to *account* and prepare its data directory."""
        super().__init__()
        self.account = account
        # Each account gets its own subdirectory under the spider data dir.
        account_dir = os.path.join(self.data_dir, account.name)
        self.account_data_dir = account_dir
        mkdir(account_dir)
        self.captcha_image_path = os.path.join(account_dir, "captcha.png")

    def start_requests(self):
        """Entry point for the crawl: kick off the login callback chain.

        Yields:
            The first request of the login flow (encrypt key + request id).

        Raises:
            ValueError: if no account is configured. An explicit raise is used
                instead of ``assert`` because asserts are stripped under
                ``python -O`` and this is runtime input validation.
        """
        if self.account is None:
            raise ValueError("Please init the account info first!")
        # Fixed typo: "crawlling" -> "crawling".
        self.logger.info("Start crawling {} using {}'s account".format(self.name, self.account.name))
        yield self.request_encrypt_key_and_req_id()

    def request_encrypt_key_and_req_id(self):
        """Build the POST request whose response carries the password encrypt
        key and the login request id (parsed by the callback)."""
        self.logger.info("Request encrypt key and req id")
        request_kwargs = {
            "method": "POST",
            "headers": Kvbin.HEADERS,
            "cookies": Kvbin.COOKIES,
            "callback": self.parse_encrypt_key_and_req_id_and_get_turing_id,
        }
        return scrapy.Request(Kvbin.ENCRYPT_KEY_URL, **request_kwargs)

    def parse_encrypt_key_and_req_id_and_get_turing_id(self, response):
        """Extract the encrypt key and login request id, then request the turing id.

        The values are embedded in inline JavaScript that assigns to form
        fields of the parent frame; they are pulled out with regexes.

        Returns:
            The follow-up request for the turing (captcha) session id.
        """
        text = response.text
        # Dots are escaped so '.' matches a literal dot, not any character,
        # which prevents accidental matches on similar-looking assignments.
        encrypt_key_match = re.search(r'parent\.document\.frmtoken\.fldEncrKey\.value = "(.*?)";', text)
        req_id_match = re.search(r'parent\.document\.frmLogon\.fldLgnReqId\.value = "(.*?)";', text)
        if encrypt_key_match:
            self.encrypt_key = encrypt_key_match.group(1)
        else:
            # Previously a silent no-op; surface the failure so a broken login
            # page is diagnosable from the logs.
            self.logger.warning("Encrypt key not found in response")
        if req_id_match:
            self.req_id = req_id_match.group(1)
        else:
            self.logger.warning("Req id not found in response")
        self.logger.info("Get encrypt key: {}, req id: {}".format(self.encrypt_key, self.req_id))
        self.logger.info("Request turing id")
        return Request(
            Kvbin.TURING_SESSION_URL,
            method="POST",
            headers=Kvbin.HEADERS,
            cookies=Kvbin.COOKIES,
            callback=self.parse_turing_id_and_get_turing_captcha_image)

    def parse_turing_id_and_get_turing_captcha_image(self, response):
        """Extract the turing (captcha session) id and request the captcha image.

        Bug fix: the image URL is now stored on
        ``self.turing_captcha_image_url`` (declared at class level and read by
        ``after_login`` to retry the captcha) — it was previously only a local
        variable, so every retry requested ``None``.

        Returns:
            The request that downloads the captcha image.
        """
        turing_id_match = re.search(r'l_turingid = \'(.*?)\';', response.text)
        if turing_id_match:
            # URL-quote the id: it is embedded in a query string below.
            self.turing_id = quote(turing_id_match.group(1))
        else:
            self.logger.warning("Turing id not found in response")
        self.logger.info("Get turing id: {}".format(self.turing_id))
        self.logger.info("Request turing captcha image")
        # Keep the URL on the instance so a captcha retry can re-download it.
        self.turing_captcha_image_url = "{}&fldidturing={}&fldData=".format(
            Kvbin.TURING_IMAGE_BASE_URL, self.turing_id)
        # Replaced stray print() with the spider logger.
        self.logger.debug("Turing captcha image url: {}".format(self.turing_captcha_image_url))
        return scrapy.Request(
            self.turing_captcha_image_url,
            method="POST",
            headers=Kvbin.HEADERS,
            cookies=Kvbin.COOKIES,
            callback=self.parse_captcha_image_and_login)

    def parse_captcha_image_and_login(self, response):
        """Save and OCR the captcha image, then submit the login form.

        Returns:
            The login POST request; ``after_login`` handles the result.
        """
        captcha_path = self.captcha_image_path
        try:
            self.logger.info("Save captcha image: {}".format(captcha_path))
            self.save_captcha_image(response, captcha_path)
        except Exception as e:
            # Best-effort save: OCR below is still attempted on the old file.
            self.logger.error("Save captcha image failed: {}".format(str(e)))
        # OCR the image, then normalise the code to upper case.
        recognized = self.recognize_image_code(captcha_path, "10111")
        self.captcha_code = recognized.upper()
        self.logger.info("Get captcha code: {}".format(self.captcha_code))
        # The site expects the password encrypted with the per-session key.
        encrypted_password = encrypt(self.account.password, self.encrypt_key)
        self.logger.info("Get encrypt password: {}".format(encrypted_password))
        # Build the form field-by-field; dicts keep insertion order, so the
        # urlencoded body is identical to the original literal-dict version.
        form_fields = {}
        form_fields["fldLoginUserId"] = self.account.name
        form_fields["fldPassword"] = encrypted_password
        form_fields["fldlitever"] = "L"
        form_fields["fldcaptcha"] = self.captcha_code
        form_fields["fldDeviceId"] = "01"
        form_fields["fldLangId"] = "eng"
        form_fields["fldRequestId"] = "RROGN01"
        form_fields["fldudf"] = "fldlitever"
        form_fields["fldLgnReqId"] = self.req_id
        form_fields["fldturing"] = self.turing_id
        self.logger.info("Login form data: {}".format(form_fields))
        return Request(
            url=Kvbin.BASE_URL,
            method="POST",
            headers=Kvbin.HEADERS,
            cookies=Kvbin.COOKIES,
            body=urlencode(form_fields),
            callback=self.after_login
        )

    def after_login(self, response):
            """Handle the login response: archive it, and retry on bad captcha."""
            # NOTE(review): the method body is over-indented by one extra level
            # (valid Python, but should be normalised to 8 spaces).
            text = response.text
            soup = BeautifulSoup(text, 'html.parser')
            # Persist each attempt's HTML for offline debugging; the retry
            # counter in the filename keeps attempts distinct.
            login_result_html_path = os.path.join(self.account_data_dir, "login-result_{}.html".format(self.retry_count))
            with open(login_result_html_path, "w") as f:
                f.write(text)
            # bs4 deprecates find(text=...) in favour of find(string=...);
            # behaviour is the same here.
            invalid_captcha_element = soup.find(text="Invalid Captcha.")
            # self.max_retry_count presumably comes from BaseSpider -- TODO confirm.
            if invalid_captcha_element and self.retry_count < self.max_retry_count:
                self.logger.warning("Captcha not correct, retry, retry count: {}".format(self.retry_count))
                self.retry_count += 1
                # NOTE(review): verify self.turing_captcha_image_url is actually
                # assigned before a retry runs -- it is declared as None at class
                # level, and a None URL here would make the retry request fail.
                return scrapy.Request(
                self.turing_captcha_image_url,
                method="POST",
                headers=Kvbin.HEADERS,
                cookies=Kvbin.COOKIES,
                callback=self.parse_captcha_image_and_login)