# -*- coding: utf-8 -*-
# Created: 2021/7/15 18:21

from selenium import webdriver
import time
import random
import uuid
from selenium.webdriver import ActionChains

import os, re, io

import numpy as np
import requests
import sys
import traceback, base64
import json, urllib
import  hashlib
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
import PyChromeDevTools
from selenium.webdriver.support.select import Select

from selenium.webdriver.common.keys import Keys


from bs4 import BeautifulSoup


# Example / demo entry point
def Email(port):
    """Attach to an already-running Chrome via its remote-debugging port,
    then loop forever: scrape the statement table from the current page
    and POST the extracted rows to the collection endpoint.

    Parameters:
        port: str -- Chrome remote-debugging port (e.g. "9222") to attach to.

    Returns:
        False if attaching to the browser or the scrape loop fails fatally;
        otherwise never returns (infinite polling loop).
    """
    try:
        options = webdriver.ChromeOptions()
        print("开始加载浏览器配置")

        # Enable performance logging and attach to the existing Chrome
        # instance instead of launching a new one.
        d = DesiredCapabilities.CHROME
        d['loggingPrefs'] = {'performance': 'ALL'}
        options.add_experimental_option("debuggerAddress", "127.0.0.1:" + port)

        browser = webdriver.Chrome(chrome_options=options, desired_capabilities=d)
        chrome = PyChromeDevTools.ChromeInterface(host="127.0.0.1", port=int(port))

        chrome.Network.enable()
        chrome.Page.enable()

        # Give the page time to settle before the first scrape.
        time.sleep(10)

        while True:
            try:
                # Proceed only if the statement table is present on the page.
                if browser.find_element_by_xpath('//*[@id="formGeral:tabelaListaMovimentos_data"]'):
                    # Open the date picker and confirm it to refresh the table.
                    browser.find_element_by_xpath('//*[@id="extratoDatePicker"]').click()
                    time.sleep(3)
                    browser.find_element_by_xpath('/html/body/div[9]/div[1]/div/button[1]').click()

                    # Wait for the refreshed table to render.
                    time.sleep(10)
                    html = browser.find_element_by_xpath('//*[@id="formGeral:tabelaListaMovimentos_data"]').get_attribute("innerHTML")
                    alllist = []
                    # BUGFIX: name the parser explicitly. A bare
                    # BeautifulSoup(html) emits GuessedAtParserWarning and may
                    # pick different parsers (and produce different trees) on
                    # different machines.
                    soup = BeautifulSoup(html, "html.parser")
                    tr_obj_list = soup.find_all(
                        "tr",
                        class_=["ui-widget-content ui-datatable-even",
                                "ui-widget-content ui-datatable-odd"])
                    for row in tr_obj_list:
                        # BUGFIX: row.get("data-ri") can be None, which would
                        # make "|".join raise TypeError; fall back to "".
                        tdlist = [row.get("data-ri") or ""]
                        for cell in row.find_all("td"):
                            tdlist.append(cell.text.strip())
                        alllist.append("|".join(tdlist))

                    headers = {
                        "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
                        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
                        "Content-Type": "text/html; charset=UTF-8"}
                    # BUGFIX: add a timeout so a stalled endpoint cannot hang
                    # the scrape loop forever.
                    resp = requests.post("https://bra.polymerizations.com/poi/show_code/getpypost",
                                         data=json.dumps({"all": alllist}),
                                         headers=headers, timeout=30)
                    time.sleep(40)  # refresh interval between scrapes

            except Exception as e:
                # Best-effort loop: log the error and retry on the next pass.
                print(e)

            time.sleep(3)

    except Exception as e:
        # BUGFIX: traceback.print_exc() returns None; the original
        # print(traceback.print_exc()) printed a spurious "None" line.
        traceback.print_exc()
        print(str(e))
        return False


# Read the Chrome remote-debugging port from the DevToolsActivePort file
# that lives next to this script; its first line is the port number.
port = ""
# BUGFIX: use os.path.join instead of manual "\\"->"/" string surgery, and
# open the file with an explicit encoding so reads don't depend on the
# platform's locale default.
with open(os.path.join(sys.path[0], "DevToolsActivePort"), "r", encoding="utf-8") as f:
    data = f.read()
    print(data)
    port = data.split("\n")[0]

#Email(port=port)


