# -*- coding=utf-8 -*-

import os
import requests
from bs4 import BeautifulSoup

def read_file_content(file_name):
    """Read and return the entire contents of a UTF-8 text file.

    Args:
        file_name: Path of the file to read.

    Returns:
        The full file contents as a single string.

    Raises:
        OSError: If the file cannot be opened or read.
    """
    # `with` guarantees the file handle is closed even if read() raises,
    # replacing the manual try/finally/close of the original.
    with open(file_name, 'r', encoding='utf-8') as file_object:
        return file_object.read()

def write_to_txt(content, filename):
    """Write *content* to *filename* as UTF-8 text, replacing any existing file.

    Args:
        content: The string to write.
        filename: Destination file path.

    Raises:
        OSError: If the file cannot be created or written.
    """
    # Mode "w" already truncates an existing file, so the original
    # os.path.exists/os.remove dance was redundant and has been dropped.
    # `with` guarantees the handle is flushed and closed on any exit path.
    with open(filename, "w", encoding='utf-8') as outfile:
        outfile.write(content)
    print("文件保存成功:"+filename)


def generate_proxy_content_per(temp_ip):
    """Build a requests-style ``proxies`` dict from one parsed proxy row.

    Args:
        temp_ip: A sequence of ``[ip, port, http_type]`` strings, as
            produced by ``get_proxy_list()``.

    Returns:
        ``{"https": "http://ip:port"}`` when the type is ``'HTTPS'``,
        ``{"http": "http://ip:port"}`` when the type is ``'HTTP'``,
        or ``None`` for any other type.
    """
    ip_dict = {
        "ip": temp_ip[0],
        "port": temp_ip[1],
        "http_type": temp_ip[2],
    }

    proxy_host = "http://" + ip_dict["ip"] + ":" + ip_dict["port"]
    if ip_dict["http_type"] == 'HTTPS':
        proxy_temp = {"https": proxy_host}
    elif ip_dict["http_type"] == 'HTTP':
        proxy_temp = {"http": proxy_host}
    else:
        print("既不是http，也不是https请求")
        # BUG FIX: the original fell through here with proxy_temp unbound,
        # raising NameError on the print/return below. Return None instead.
        return None
    print("构造出来的proxy是："+str(proxy_temp))
    return proxy_temp

def get_proxy_list():
    """Fetch (or load a cached copy of) the kuaidaili free-proxy page and parse it.

    The raw HTML is cached in ``kuaidaili_proxy_web_source.txt`` next to the
    script; the network is only hit when that cache file does not exist.

    Returns:
        A list of ``[ip, port, http_type]`` string triples, one per table row.

    Raises:
        requests.RequestException: If the page must be downloaded and the
            HTTP request fails.
        OSError: If the cache file cannot be read or written.
    """
    file_fullpath = 'kuaidaili_proxy_web_source.txt'
    if not os.path.exists(file_fullpath):
        # A desktop User-Agent avoids trivial bot blocking on the proxy site.
        User_Agent = 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:43.0) Gecko/20100101 Firefox/43.0'
        header = {'User-Agent': User_Agent}
        url = 'https://www.kuaidaili.com/free/inha/1/'
        req = requests.get(url, headers=header, proxies=None)
        res = req.content.decode("utf-8")
        print(res)

        write_to_txt(res, file_fullpath)
        print("成功保存代理网页源代码到本地")
    else:
        print("代理网页已在本地保存")

    res = read_file_content(file_fullpath)
    soup = BeautifulSoup(res, "html.parser")
    # Row 0 is the table header, so parsing starts at row 1.
    rows = soup.find_all('tr')  # find_all: findAll is the deprecated alias

    ip_list = []
    for row in rows[1:]:
        tds = row.find_all("td")
        # ROBUSTNESS: skip header/short rows instead of raising IndexError.
        # Expected columns (per the page layout): 0=IP, 1=port, 3=type —
        # TODO confirm against the live kuaidaili markup if parsing breaks.
        if len(tds) < 4:
            continue
        ip_list.append([tds[0].text, tds[1].text, tds[3].text])
    print("成功获取iplist")
    return ip_list

if __name__ == '__main__':
    # Fetch (or load from cache) the parsed proxy rows, then build a
    # requests-style proxies dict for each entry. The dict is discarded;
    # this loop only exercises the construction path.
    for entry in get_proxy_list():
        proxy = generate_proxy_content_per(entry)