import time
from selenium import webdriver
import string
import zipfile

import requests


def get_proxy():
    """Fetch one proxy entry from the local proxy-pool service.

    Returns the parsed JSON response (a dict; the proxy address is
    expected under the "proxy" key — see the caller in getHtml).
    """
    # timeout added: without it a dead pool service would hang this call forever
    return requests.get("http://127.0.0.1:5010/get/", timeout=10).json()


def delete_proxy(proxy):
    """Tell the local proxy-pool service to drop a dead proxy.

    :param proxy: proxy address string (e.g. "1.2.3.4:8080") as returned
                  by the pool's /get/ endpoint.
    """
    # Use params= so the proxy value is URL-encoded (it contains ':',
    # which raw string interpolation would leave unescaped), and add a
    # timeout so a dead pool service cannot hang the caller.
    requests.get(
        "http://127.0.0.1:5010/delete/",
        params={"proxy": proxy},
        timeout=10,
    )


# your spider code

def getHtml():
    """Open the voting page through a pooled proxy and cast one vote.

    Retries with the same proxy up to 5 times; if every attempt raises,
    the proxy is removed from the pool via delete_proxy().

    Returns None in all cases (side-effect-only function).
    """
    retry_count = 5
    proxy = get_proxy().get("proxy")
    while retry_count > 0:
        driver = None
        try:
            options = webdriver.ChromeOptions()
            options.add_argument("--proxy-server={}".format("http://{}".format(proxy)))

            # selenium 3.x-style API (executable_path / chrome_options),
            # matching the rest of this file. Raw string avoids invalid
            # backslash escapes in the Windows path.
            driver = webdriver.Chrome(
                executable_path=r'D:\code\刷票\chromedriver.exe',
                chrome_options=options,
            )
            url = "http://vote.e23.cn/zhuanti/jnncp2020shibao/show_1.html?openid=&openidMd5=&subscribe=undefined&group=1"
            driver.maximize_window()
            driver.get(url)
            try:
                # Scroll down so the search widget is in view.
                driver.execute_script('window.scrollTo(0,1000)')
                # Search for candidate #128 and click its vote controls.
                driver.find_element_by_xpath('//*[@id="searchContent"]').send_keys('128')
                driver.find_element_by_id('searchBtn').click()
                time.sleep(1)  # wait for search results to render
                driver.find_element_by_xpath('//*[@value="128"]').click()
                driver.find_element_by_xpath('//*[@id="form1"]/div[4]/div/div[2]').click()
                time.sleep(1)  # wait for the confirmation layer to appear
                # Confirm in the layui popup dialog.
                driver.find_element_by_xpath('//*[@id="layui-layer1"]/div[3]/a').click()
            except Exception as e:
                # Best-effort: a missing element should not abort the attempt.
                print(e)
            # Success: the original code looped forever here; return after
            # one completed attempt so the function actually terminates.
            return None
        except Exception:
            retry_count -= 1
        finally:
            # Always release the browser, even when an exception escaped
            # the outer try (the original leaked the driver in that case).
            if driver is not None:
                driver.quit()
    # Failed 5 times: remove this proxy from the pool.
    delete_proxy(proxy)
    return None


# Script entry point: perform a single voting run when executed directly.
if __name__ == '__main__':
    getHtml()



