import requests
import random
import ddddocr
import urllib3
from PIL import Image
from playwright.sync_api import sync_playwright


class ChinaMarathon:
    """Scrape a runner's public race results from runchina.org.cn.

    Workflow (all inside :meth:`get_score`):
      1. Launch headless Chromium via Playwright with automation-detection
         flags disabled and a stealth init script.
      2. Fill the public-score search form with the runner's name and ID.
      3. Capture the Tencent slider-captcha images from network responses,
         solve the gap offset with ddddocr, and replay a human-like drag.
      4. Capture the score API response and POST it to a collection endpoint.
    """

    def __init__(self) -> None:
        # Raw text of the score API response; populated by the response
        # listener registered inside get_score().
        self.result = ""
        # Shared urllib3 pool used to re-download the captcha images by URL.
        self.http = urllib3.PoolManager()

    def get_proxy(self) -> str:
        """Fetch one proxy address from the dmdaili API and return it as text.

        Currently unused: the Playwright ``proxy=`` option in get_score()
        is commented out.
        """
        p = requests.get(
            "http://api.dmdaili.com/dmgetip.asp?apikey=062c68ee&pwd=2ceacf762585db3e1d24cbdb4ef11091&getnum=1&httptype=1&geshi=1&fenge=1&fengefu=&operate=all&setcity=scchengdu").text
        return p

    def get_score(self, param: dict) -> None:
        """Look up one runner's scores and push the result downstream.

        Args:
            param: Job descriptor. Reads ``spider_config.user_name`` and
                ``spider_config.card_num`` for the search form, plus
                ``serialNumber`` / ``web_type`` / ``crawler_type`` for the
                payload echoed back to the dealData endpoint.

        Side effects: writes ``bg_picture.jpg`` and ``cut_picture.png`` to
        the working directory, and POSTs the captured score response to
        http://132.232.166.135:8905/api/scrapy/dealData. Returns None.
        """
        user_name = param.get("spider_config", {}).get("user_name", "")
        card_num = param.get("spider_config", {}).get("card_num", "")
        # proxy = {
        #     "server": self.get_proxy()
        # }
        with sync_playwright() as p:
            browser = p.chromium.launch(headless=True,
                                        # proxy=proxy,
                                        args=[
                                            "--disable-blink-features=AutomationControlled",
                                            "--disable-infobars"
                                        ]
                                        )

            def get_random_user_agent():
                # Pool of desktop UA strings; currently unused because the
                # user_agent= argument below is commented out.
                agents = [
                    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36",
                    "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
                    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; SLCC1; .NET CLR 2.0.50727; "
                ]
                return random.choice(agents)

            def stealth_page(context):
                # New page with navigator.webdriver masked so the site's
                # automation checks see `undefined` instead of true.
                page = context.new_page()
                page.add_init_script("""
                    Object.defineProperty(navigator, 'webdriver', {
                        get: () => undefined
                    })
                """)
                return page
            context = browser.new_context(
                # user_agent=get_random_user_agent(),
                viewport={"width": 1366, "height": 768}
            )
            page = stealth_page(context)

            # Fill the search form; type() with a delay mimics human typing.
            page.goto("https://www.runchina.org.cn/#/data-score/public-score/list")
            page.locator('input.el-input__inner').nth(0).type(user_name, delay=100)
            page.locator('input.el-input__inner').nth(1).type(card_num, delay=100)
            page.click('xpath=//*[@class="search-button"]')

            def on_response(response):
                # Capture the two Tencent captcha images by re-fetching their
                # URLs: img_index=1 is saved as the background, img_index=0 as
                # the cut-out slider piece.
                if '/cap_union_new_getcapbysig' in response.url and response.status == 200:
                    if 'img_index=1' in response.url:
                        with open("bg_picture.jpg", "wb") as f:
                            f.write(self.http.request("GET", response.url).data)
                    elif 'img_index=0' in response.url:
                        with open("cut_picture.png", "wb") as f:
                            f.write(self.http.request("GET", response.url).data)
                # The score payload arrives once the captcha is passed.
                if 'mineRaceScoreWithToken' in response.url and response.status == 200:
                    self.result = response.text()

            page.on('response', on_response)

            def get_gap_offset():
                # Solve the slider captcha: crop the slider piece out of the
                # downloaded image, then ask ddddocr for the gap's x position.
                det = ddddocr.DdddOcr(det=False, ocr=False, show_ad=False)
                img = Image.open('cut_picture.png')
                # Crop box presumably isolates the slider tile within the
                # sprite sheet — TODO confirm against a live captcha image.
                region = img.crop((160, 508, 243, 595))
                region.save(f'cut_picture.png')
                with open('bg_picture.jpg', 'rb') as f:
                    target_bytes = f.read()
                with open('cut_picture.png', 'rb') as f:
                    background_bytes = f.read()
                # NOTE(review): ddddocr's slide_match convention is
                # slide_match(target, background) where `target` is the small
                # slider piece — here `target_bytes` holds the background image
                # and `background_bytes` the cropped piece, so the names (and
                # possibly the argument order) look swapped. Verify against a
                # captured captcha before changing anything.
                res = det.slide_match(target_bytes, background_bytes, simple_target=True)
                distance = int(res['target'][0])
                return distance

            def get_track_list(distance):
                # Split the drag distance into 4 near-equal steps with tiny
                # jitter so the mouse trajectory looks human. The -30 offset
                # is an empirical correction — TODO confirm its origin.
                distance -= 30
                a = distance / 4
                a1 = a + random.uniform(-0.01, 0.01)
                a2 = a + random.uniform(-0.01, 0.01)
                a3 = a + random.uniform(-0.01, 0.01)
                a4 = a + random.uniform(-0.01, 0.01)
                track = [a1, a2, a3, a4]
                return track

            def move_down(page):
                # Drag the slider handle inside the Tencent captcha iframe by
                # the solved gap offset, moving in jittered steps with short
                # randomized pauses between them.
                new_frame = page.frame_locator('iframe[id="tcaptcha_iframe_dy"]')
                move_tag = new_frame.locator('xpath=//*[@id="tcOperation"]/div[6]')
                box = move_tag.bounding_box()
                page.mouse.move(box["x"] + box["width"] / 2, box["y"] + box["height"] / 2)
                page.mouse.down()
                page.wait_for_timeout(500)
                x = box["x"] + box["width"] / 2
                # 1.97 presumably converts captcha-image pixels to on-page CSS
                # pixels — TODO confirm the scaling factor.
                distance = int(get_gap_offset() / 1.97)
                move_distance = get_track_list(distance)
                for i in move_distance:
                    x += i
                    page.mouse.move(x, box["y"])
                    wait_time = random.uniform(40, 50)
                    page.wait_for_timeout(wait_time)
                page.mouse.up()

            move_down(page)
            # Give the site time to validate the captcha and return the score
            # response before tearing the browser down.
            page.wait_for_timeout(3500)
            page.remove_listener('response', on_response)
            browser.close()
        # Push whatever was captured (possibly "" if the captcha failed) to
        # the downstream collector.
        result_data = {"serialNumber": param.get("serialNumber"), "webType": param.get("web_type"), "crawlerType": param.get("crawler_type"), "data": str(self.result)}
        try:
            # NOTE(review): resp.status_code is never checked, so the success
            # message below can print even on an HTTP error response.
            resp = requests.post("http://132.232.166.135:8905/api/scrapy/dealData", json=result_data,
                                 headers={"Content-Type": "application/json"})
            print("数据推送成功")
        except Exception as e:
            print(e)


if __name__ == "__main__":
    # Sample job descriptor; real callers supply their own serial number,
    # type tags, and runner identity under spider_config.
    param = {
        "serialNumber": "111111",
        "crawler_type": "score",
        "web_type": "malasong",
        "spider_config": {"user_name": "杜璐", "card_num": "511321198612302823"},
    }
    crawler = ChinaMarathon()
    # get_score() pushes its result downstream itself and returns None, so
    # there is nothing to capture (the old `b = ...` binding was dead code,
    # as was the trailing `pass`).
    crawler.get_score(param)