
import math
import asyncio
from urllib.parse import urlparse
import random
import os, sys, time
import numpy as np
from playwright.async_api import async_playwright
from playwright_stealth import stealth_async

from lib import until, helpers, material, proxy_ip
 
 


# Shared Material() helper instance reused by all BDkp methods for
# work-queue/state operations (getTotal, getPageList, click_update_data, ...).
materialFunc = material.Material()

class BDkp():
    """Baidu keyword search/click driver.

    Launches one headless Chromium per remote-debugging port via Playwright,
    searches Baidu for each configured keyword, and simulates a human reading
    the target site (plus random "noise" clicks on other results).
    """

    def sync_main(self, loop_value):
        """Synchronous entry point: run the async main loop to completion.

        Returns 1 when the whole run has finished.
        """
        self.getDataPage = 1  # page cursor into the keyword work queue
        asyncio.run(self.main(loop_value))
        _time = time.strftime("%Y-%m-%d %H:%M", time.localtime())
        until.logger.info(f"{_time} 发包工作完成,抓包停止 ----  ")
        return 1

    async def main(self, loop_value):
        """Run getkeywordList() forever (loop_value == -1) or loop_value times."""
        if loop_value == -1:
            while True:
                _time = time.strftime("%Y-%m-%d %H:%M", time.localtime())
                until.logger.info(f"{_time} 发包开始 ... ")
                await self.getkeywordList()
                await asyncio.sleep(3)
        if loop_value > 0:
            for _ in range(loop_value):
                await self.getkeywordList()
                await asyncio.sleep(1)
        return

    async def initialize(self):
        """(Re)initialize per-run state and coerce numeric config values to int."""
        # Per-port bookkeeping maps, keyed by remote-debugging port number.
        self.port_arr = {}
        self.click_js = {}
        self.hotkeyword_id = {}
        self.web_url = {}
        self.web_name = {}
        self.hotkeyword = {}
        self.contexts = {}
        self.browser_pos = {}
        self.browser = {}
        self.bdConf = helpers.bdSetconfCache()
        # Config values arrive as strings; normalize the numeric ones once.
        for key in ('threads_max_num', 'memory_min_num', 'page_load_timeout',
                    'depth_max', 'search_stay_min_num', 'search_stay_max_num'):
            self.bdConf[key] = int(self.bdConf[key])

        self.proxyIps = {}
        self.page_max_depth = int(self.bdConf['depth_max'])

    async def getkeywordList(self):
        """Fetch a batch of keyword rows and fan out one browser task per row."""
        while True:
            await self.initialize()
            ret = self.get_keys_row(self.bdConf['threads_max_num'])
            if ret == -1100:
                # Page cursor ran past the last page: reset work states, restart.
                materialFunc.updateWorkstateAllRec()
                self.getDataPage = 1
                continue
            elif ret == -1000:
                materialFunc.updateWorkstateAllRec()
                # BUG FIX: time.sleep() blocked the whole event loop here.
                await asyncio.sleep(10)
                continue
            else:
                break

        # Rows carry: site domain, company name, keyword.
        self.rows = np.array(ret)

        ports = []
        min_port = until.get_random_process_id()
        _range_num = self.bdConf['threads_max_num']
        if len(self.rows) > self.bdConf['threads_max_num']:
            _range_num = len(self.rows)

        for p in range(_range_num):
            ports.append(min_port + p)  # consecutive debug ports

        tasks = []
        for i, p in enumerate(ports):
            print(f"初始化端口号:{p}")
            if i < len(self.rows):
                self.port_arr[p] = p
                self.click_js[p] = 1
                self.hotkeyword_id[p] = self.rows[i]['id']
                self.web_url[p] = self.rows[i]['seourl']
                self.web_name[p] = self.rows[i]['web_name']
                self.hotkeyword[p] = self.rows[i]['seokeyword']
                tasks.append(asyncio.ensure_future(self.openBrower_sync(p)))

        await asyncio.gather(*tasks)
        _time = time.strftime("%Y-%m-%d %H:%M", time.localtime())
        until.logger.info(f"{_time} 进程任务完成结束 ----")

    async def openBrower_sync(self, i):
        """Launch a headless Chromium exposing CDP on port i, then drive it."""
        index_key = port = self.port_arr[i]
        options = {"channel": "chromium", "headless": True}
        options["args"] = [f'--remote-debugging-port={index_key}']  # fixed CDP port
        print(f"[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],启动端口：{index_key}")
        playwright = await async_playwright().start()
        await playwright.chromium.launch(**options)

        print(f"http://127.0.0.1:{index_key}")
        await self.page2Event(playwright, index_key)
        return

    async def page2Event(self, playwright, index_key):
        """Connect to the launched browser over CDP and run one search pass.

        Pauses the whole batch for 120s when free memory drops below the
        configured minimum.
        """
        total_mem, used_mem, available_mem = until.get_memory_info()
        _time = time.strftime("%Y-%m-%d %H:%M", time.localtime())
        print(f"当前时间：{_time}")
        print("----------- memory info ------------------------------")
        # BUG FIX: the value is in MB (divided by 1024**2); the label said GB.
        print("空闲内存: {:.2f} MB".format(available_mem / (1024 ** 2)))
        print("------------------------------------------------------")
        _total_mem = math.ceil(available_mem / (1024 ** 2))
        until.logger.info(f"空闲内存: {_total_mem}MB")
        if _total_mem < self.bdConf['memory_min_num']:
            until.logger.error(f"-----------内存太低 {_total_mem}MB, 系统暂停,等 ... ------------------")
            print("内存太低, 系统暂停,等 ...")
            await asyncio.sleep(120)
            return

        for port in self.port_arr:
            try:
                _proxy_ip = ''
                if self.bdConf['proxy_url']:
                    for _ in range(20):  # at most 20 attempts to get a live proxy
                        _proxy_ip = await proxy_ip.get_ipaddress(self.bdConf['proxy_url'])
                        if _proxy_ip is None:
                            until.logger.info(f"----------- 未配置代理IP, 请谨慎使用 ------------------")
                            print("未配置代理IP, 请谨慎使用...")
                        elif until.TestPingStatus(_proxy_ip):
                            break
                        else:
                            print(f"代理IP{_proxy_ip}不可用，重新获取")
                            # BUG FIX: was `return`, which aborted every remaining
                            # port after a single dead proxy; retry instead.
                            continue

                self.browser[index_key] = await playwright.chromium.connect_over_cdp(
                    f"http://127.0.0.1:{index_key}")
                if _proxy_ip:
                    self.contexts[index_key] = await self.browser[index_key].new_context(
                        proxy={'server': f'http://{_proxy_ip}'})
                    print(f"启用代理ip:{_proxy_ip}")
                    self.proxyIps[port] = _proxy_ip
                else:
                    self.contexts[index_key] = await self.browser[index_key].new_context()
                    self.proxyIps[port] = ''

                if self.web_url[port]:
                    contexts1 = self.contexts[port]
                    await self.__page2Event(contexts1, port)
                    await asyncio.sleep(1)
                await self.browser[index_key].close()
            except Exception:
                # BUG FIX: self.browser[index_key] may not exist if
                # connect_over_cdp itself failed; guard the cleanup.
                browser = self.browser.get(index_key)
                if browser is not None:
                    try:
                        await browser.close()
                    except Exception:
                        pass
                await asyncio.sleep(1)
                return

    async def __page2Event(self, contexts1, index_key):
        """Open Baidu in a fresh page, search the keyword, then drive clicks."""
        try:
            num = random.choice(range(0, 5))
            tn = random.choice(until.TN)
            baidu_url = "https://www.baidu.com"
            # Randomly append a tn= channel parameter (num>=2, i.e. ~60%).
            pageurl = '%s?tn=%s' % (baidu_url, tn) if num >= 2 else baidu_url
            page2 = await contexts1.new_page()
            await stealth_async(page2)  # mask automation fingerprints

            wait_for_ms = self.bdConf['page_load_timeout'] * 1000  # milliseconds
            await page2.goto(url=pageurl)
            await page2.wait_for_selector(selector="body", timeout=wait_for_ms)
            await page2.locator("#kw:visible").fill(self.hotkeyword[index_key])
            await page2.locator("#kw:visible").press("Enter")
            await page2.wait_for_selector(selector=".tts-title", timeout=wait_for_ms)
            await self.pageClickMain(page2, index_key)
        except Exception as e:
            _, value, traceback = sys.exc_info()
            until.logger.error(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],打开百度出错！')
            until.logger.error(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],error line: {traceback.tb_lineno}')
            until.logger.error(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],error: {e}')
            return None

    # Find the result ids on the current results page.
    async def elementIds(self, page2, index_key):
        """Collect result div ids from the SERP.

        Returns (target_id, other_ids) when the target site is present,
        the full id list when it is not, or None on failure/no results.
        """
        try:
            await asyncio.sleep(3)
            await page2.wait_for_selector("#content_left>div.result")
            divs = await page2.query_selector_all("#content_left>div.result")
            if not divs:
                return None

            element_list = []
            for div in divs:
                element_list.append(await div.get_attribute("id"))

            print(element_list)
            await page2.wait_for_selector("#content_left>div.result span.c-color-gray")
            spans = await page2.query_selector_all("#content_left>div.result span.c-color-gray")

            if not spans:
                return None
            for vspan, vdiv in zip(spans, divs):
                mContent = await vspan.text_content()  # grey source-line text
                print(f"  >>>{mContent}")
                if self.web_url[index_key] in mContent or self.web_name[index_key] in mContent:
                    tarid = await vdiv.get_attribute('id')
                    element_list.remove(tarid)
                    return tarid, element_list
            return element_list
        except Exception as e:
            _, value, traceback = sys.exc_info()
            until.logger.error(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],找出刷词对应的id值 ！')
            until.logger.error(
                f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],error line: {traceback.tb_lineno}')
            until.logger.error(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],error: {e}')
            return None

    # Randomly click links to add interference (noise) traffic.
    async def randClick(self, page2, other_ids, index_key):
        """Click 1-2 random non-target results to disguise the traffic."""
        clickTimes = random.choice(range(1, 3))
        try:
            for c in range(1, clickTimes + 1):
                randId = random.choice(other_ids)
                mElement = f"div[id=\"content_left\"] div[id=\"{randId}\"] a"
                await page2.wait_for_selector(mElement)
                await page2.wait_for_selector("#content_left>div.result span.c-color-gray:visible")
                # NOTE(review): coordinates below come from the first grey span,
                # not from the randId element waited on above — confirm intended.
                element_selector = await page2.query_selector("#content_left>div.result span.c-color-gray")
                await page2.evaluate("window.scrollBy(0,0)")
                # Element coordinates on the page.
                bounding_box = await element_selector.bounding_box()
                # Scroll so the element lands near the middle of the viewport.
                new_x = int(bounding_box['x'] + bounding_box['width'] / 2) - int(page2.viewport_size['width'] / 2)
                new_y = int(bounding_box['y'] + bounding_box['height'] / 2) - int(page2.viewport_size['height'] / 2)
                await page2.mouse.wheel(new_x, new_y)
                elex, eley = await until.element_offset_value(element_selector)
                until.logger.info(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],当前第{c}次加入干扰素')
                await until.elementMouseClick(page2, elex, eley)
                await asyncio.sleep(1)
        except Exception as e:
            print(f"error: {e}")
            raise e

    # Pagination: click the numbered next-page button.
    async def clickNextPage(self, page2, pageid, index_key):
        """Scroll to the pagination bar and click the button for `pageid`."""
        await asyncio.sleep(3)
        await page2.evaluate("window.scrollBy(0,document.body.scrollHeight);")
        await page2.wait_for_selector("#page .n")
        element = await page2.query_selector("#page .n")
        if not element:
            return None
        print(f"[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],点翻页按钮{pageid}")
        page_xpath = f"//div[@id='page']/div/a[{pageid}]/span"
        await page2.wait_for_selector(page_xpath)
        page_element = await page2.query_selector(page_xpath)
        elex, eley = await until.element_offset_value(page_element)
        await until.elementMouseClick(page2, elex, eley)

    async def pageClickMain(self, page2, index_key):
        """Walk SERP pages (up to depth_max-1 — range excludes the upper bound;
        presumably intentional, confirm), click the target result when found,
        otherwise click noise links and page forward."""
        try:
            materialFunc.click_update_data(self.hotkeyword_id[index_key])
            for pageid in range(1, self.bdConf['depth_max']):
                infos = await self.elementIds(page2, index_key)
                if infos is None:
                    continue
                if isinstance(infos, tuple):
                    # Target site found on this page.
                    until.logger.info(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],当前第{pageid}页找到-目标-网站')
                    tarid, other_ids = infos
                    # Click the target result.
                    await page2.wait_for_selector("div[id='content_left'] div[id='{}'] a".format(tarid))
                    element = await page2.query_selector("div[id='content_left'] div[id='{}'] a".format(tarid))
                    elex, eley = await until.element_offset_value(element)
                    await until.elementMouseClick(page2, elex, eley)
                    await self.clickInTar(page2, index_key)
                    break
                else:
                    # Target site not on this page: add noise, go to next page.
                    until.logger.info(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],当前第{pageid}页-没有-目标网站')
                    other_ids = infos
                    await self.randClick(page2, other_ids, index_key)
                    await self.clickNextPage(page2, pageid + 1, index_key)

            await asyncio.sleep(3)
        except Exception as e:
            _, value, traceback = sys.exc_info()
            until.logger.error(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],error line: {traceback.tb_lineno}')
            print(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],error line: {traceback.tb_lineno}')
            raise e
        await self.closeAllPage(index_key)

    async def closeAllPage(self, index_key):
        """Close every page open in this port's browser context."""
        for label_page in self.contexts[index_key].pages:
            await label_page.bring_to_front()
            await label_page.close()

    # Read the target site's pages and randomly click internal links.
    async def clickInTar(self, page2, index_key):
        """Simulate reading the target site: dwell, find in-site links, click one."""
        try:
            # NOTE(review): search_stay_*_num look like minutes here (×60) but
            # are used as raw seconds for clickTimes below — confirm units.
            stay_min = self.bdConf['search_stay_min_num'] * 60
            stay_max = self.bdConf['search_stay_max_num'] * 60
            # BUG FIX: random.choice(range(a, b)) raises IndexError when a == b
            # (also shadowed the min/max builtins); randint is inclusive & safe.
            self.readTime = random.randint(stay_min, stay_max)
            mtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))

            txt = f"------{mtime}::::[{self.web_name[index_key]}||{self.hotkeyword[index_key]}]>>>>>>> 第{self.click_js[index_key]}次成功阅读了目标网站 <<<<<<<<<<<<<<\n"
            txt += f"------{mtime}::::打开的链接 {page2.url}\n"

            until.logger.info(txt)
            materialFunc.click_update_data(self.hotkeyword_id[index_key], 'success')
            _search_stay_min_num = self.bdConf['search_stay_min_num']
            _search_stay_max_num = self.bdConf['search_stay_max_num']
            clickTimes = random.randint(_search_stay_min_num, _search_stay_max_num)
            alist = {}
            tagaList = {}
            for i in range(1, random.randint(1, 4)):
                alist[index_key] = []
                self.click_js[index_key] += 1
                until.logger.info(f'在目标页面做干扰,页面上停留{clickTimes}s...')
                await asyncio.sleep(clickTimes)
                tagaList[index_key] = await page2.query_selector_all("a")
                if len(tagaList[index_key]) == 0:
                    until.logger.info(f'未找到目标链接，重新获取页面...')
                    await asyncio.sleep(3)
                    continue

                # Keep only anchors whose host (or path for relative links)
                # contains the target site's url.
                for a in tagaList[index_key]:
                    if a:
                        neilian = await a.get_attribute('href')
                        if neilian:
                            parsed = urlparse(neilian)
                            rand_a_host = parsed.netloc if parsed.netloc else parsed.path
                            if self.web_url[index_key] in str(rand_a_host):
                                alist[index_key].append(a)

                if len(alist[index_key]) == 0:
                    rand_t = random.randint(_search_stay_min_num, _search_stay_max_num)
                    until.logger.info(f'在目标页面未找到链接,未做干扰,页面上停留{rand_t}s...')
                    await asyncio.sleep(rand_t)
                    break

                rand_a = random.choice(alist[index_key])
                # BUG FIX: get_attribute is a coroutine; without await the log
                # line printed a coroutine object (and leaked a warning).
                rand_href = await rand_a.get_attribute("href")
                until.logger.info(f'随机的内页地址是{rand_href}')
                until.logger.info(f'开始浏览内页')
                elex, eley = await until.element_offset_value(rand_a)
                await until.elementMouseClick(page2, elex, eley)
                await until.rand_scroll(page2)
                clickTimes = random.randint(_search_stay_min_num, _search_stay_max_num)
                await asyncio.sleep(clickTimes)
                # BUG FIX: Playwright pages have reload(), not refresh(), and it
                # must be awaited; the old call raised AttributeError (swallowed).
                await page2.reload()
        except Exception as e:
            _, value, traceback = sys.exc_info()
            until.logger.error(
                f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],error line: {traceback.tb_lineno}')
            print(f'[{self.web_name[index_key]}||{self.hotkeyword[index_key]}],error line: {traceback.tb_lineno}')

    def get_keys_row(self, page_size=2):
        """Fetch one page of keyword rows from the work queue.

        Returns the row list on success, -1000 when no data exists, or
        -1100 when the page cursor has run past the last page.
        """
        total = materialFunc.getTotal()
        if total == 0:
            return -1000

        max_p = math.ceil(total / page_size)
        if self.getDataPage > max_p:
            return -1100

        rows = materialFunc.getPageList(self.getDataPage, page_size)
        # BUG FIX: compare to None with `is`, not `==`.
        if rows is None:
            print("数据表记录为空，退出")
            return -1000

        return rows
       


# if __name__ == '__main__':
#     # until.logger.info(f"程序启动时间 >>>>>>>>>>>>>>>>" + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
#     # c = BDkp()
#     until.option_init()
#     asyncio.run(BDkp().main())