import requests
from fake_useragent import UserAgent
from lxml import etree
import time

from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from fake_useragent import UserAgent

import requests
import random
import json
page_url = "http://dev.kdlapi.com/testproxy"  # target test page to visit
# The API endpoint below returns JSON.

# Text returned by the test endpoint (echoes the caller's IP).
proxy_ip = requests.get(page_url).text
print(proxy_ip)

# Proxy-extraction API endpoint; fetches a batch of proxy IPs
# (num=20, format=json). NOTE: contains a hard-coded order id.
api_url = "https://dps.kdlapi.com/api/getdps/?orderid=969446626073773&num=20&pt=1&format=json&sep=1"

# Raw JSON body returned by the proxy API.
proxy_ip_s = requests.get(api_url).text
# Module-level side effect: these run at import time and require network access.
proxy_list = json.loads(proxy_ip_s)['data']['proxy_list']
# print(len(proxy_list))
ip_port = random.choice(proxy_list)  # pick one "host:port" proxy at random

def sel(ip_port):
    """Open Chrome through the given proxy, search Baidu for a fixed query,
    and scroll the result block with id=8 into view.

    Args:
        ip_port: proxy address as a "host:port" string.

    Side effects: launches (and on exit quits) a Chrome browser; prints the
    scrolled element's coordinates.
    """
    chrome_options = webdriver.ChromeOptions()
    # Route all traffic through the proxy. NOTE: no spaces around '=' —
    # "--proxy-server = http://..." would be silently ignored by Chrome.
    chrome_options.add_argument("--proxy-server=http://{}".format(ip_port))
    # Randomize the user agent so the session looks less like automation.
    chrome_options.add_argument('user-agent={}'.format(UserAgent().random))

    # Selenium 4: pass options via the 'options' keyword — the old
    # 'chrome_options' keyword was removed and now raises TypeError.
    driver = webdriver.Chrome(options=chrome_options)
    try:
        # driver.get("http://httpbin.org/get")  # uncomment to verify the proxy is active
        driver.get("https://www.baidu.com/")
        driver.maximize_window()
        # Selenium 4: find_element_by_xpath was removed; use find_element(By.XPATH, ...).
        driver.find_element(By.XPATH, "//input[@id='kw']").send_keys('西红柿的功效和作用')
        time.sleep(4)
        driver.find_element(By.XPATH, "//input[@id='su']").click()
        driver.implicitly_wait(7)
        # Wait explicitly for the 8th result link to appear instead of
        # relying only on the implicit wait.
        element = WebDriverWait(driver, 10).until(
            EC.presence_of_element_located(
                (By.XPATH, '//div[@id="{}"]/h3/a'.format(8))))
        # Scrolls the element into view as a side effect and returns its coords.
        local = element.location_once_scrolled_into_view
        print(local)
        # Bug fix: window.scrollTo(x, y) — the original passed the y
        # coordinate as both arguments.
        driver.execute_script(
            "window.scrollTo({},{})".format(local.get('x'), local.get('y') + 50))
        time.sleep(100)  # keep the page open long enough to inspect manually
    finally:
        driver.quit()  # don't leak the browser / chromedriver process

def get_ip():
    """Scrape free proxies from kuaidaili.com and return the first working one.

    A proxy "works" when fetching baidu.com through it returns HTTP 200
    within 5 seconds.

    Returns:
        The first working "ip:port" string, or None if no listed proxy works.
    """
    headers = {
        'User-Agent': UserAgent().random,
    }
    con = requests.get('https://www.kuaidaili.com/free/', headers=headers)
    # Parse the page once and reuse the tree (the original parsed it twice).
    tree = etree.HTML(con.text)
    ip_li = tree.xpath('//*[@id="list"]/table/tbody/tr/td[1]/text()')
    port_li = tree.xpath('//*[@id="list"]/table/tbody/tr/td[2]/text()')
    print(ip_li, port_li)
    for ip, port in zip(ip_li, port_li):
        ip_port = '{}:{}'.format(ip, port)
        proxy = {"http": "http://" + ip_port, "https": "https://" + ip_port}
        try:
            resp = requests.get('https://www.baidu.com/', headers=headers,
                                proxies=proxy, timeout=5)
        except Exception:
            print('此IP无用{}'.format(ip_port))
            continue
        # Bug fix: the original returned the first proxy that merely did not
        # raise, even when the status code was not 200. Accept only 200.
        if resp.status_code == 200:
            print(ip_port)
            return ip_port
    return None
# while True:
#     ip = get_ip()
#     if ip:
#         sel(ip)
#         break
if __name__ == '__main__':
    # Drive the proxied browser session with the proxy chosen at import time.
    sel(ip_port)