#!/usr/bin/env python
# coding=utf-8
# author: zengyuetian
# 此代码仅供学习与交流，请勿用于商业用途。
# 用于获取代理

from bs4 import BeautifulSoup
import requests
from lib.request.headers import create_headers
from lib.utility.path import get_root_path
import json
import importlib,sys
from lib.utility.log import *
# Raw proxy entries collected by spider_proxyip():
# a list of single-key dicts, {protocol: "protocol://ip:port"}.
proxys_src = []
# NOTE(review): unused in this file — presumably populated/consumed by other
# modules importing this one; confirm before removing.
proxys = []


def spider_proxyip(num=40):
    """Scrape up to ``num`` proxy entries and persist them as JSON.

    Fetches the xicidaili free-proxy listing, parses each table row into a
    single-key dict ``{protocol: "protocol://ip:port"}``, appends the results
    to the module-level ``proxys_src`` list, and writes the whole list as
    JSON to ``<root>/data/proxyip.txt``.

    Best-effort: any failure (network, parsing, file I/O) is caught, printed,
    and swallowed, matching the script's tolerant style.

    :param num: maximum number of proxy rows to collect (default 40)
    """
    try:
        url = 'http://www.xicidaili.com/nt/1'
        # url = 'http://www.89ip.cn/index_2.html'
        req = requests.get(url, headers=create_headers())
        source_code = req.content
        soup = BeautifulSoup(source_code, 'lxml')
        ips = soup.findAll('tr')

        # Row 0 is the table header, so data rows start at index 1.
        for x in range(1, len(ips)):
            ip = ips[x]
            tds = ip.findAll("td")
            # Ad/separator rows lack the expected 6+ cells; skipping them
            # prevents an IndexError from aborting the whole scrape.
            if len(tds) < 6:
                continue
            protocol = tds[5].contents[0]
            proxy_host = "{0}://".format(protocol) + tds[1].contents[0] + ":" + tds[2].contents[0]
            proxys_src.append({protocol: proxy_host})
            if x >= num:
                break

        # Persist the proxies; `with` guarantees the file is closed even if
        # serialization fails, and utf-8 is required because ensure_ascii=False
        # may emit non-ASCII characters.
        with open(get_root_path() + '/data/proxyip.txt', 'w', encoding='utf-8') as file:
            json.dump(proxys_src, file, ensure_ascii=False)
    except Exception as e:
        print("spider_proxyip exception:")
        print(e)

def get_proxy_ip():
    """Load the previously scraped proxy list from ``<root>/data/proxyip.txt``.

    :return: the parsed list of ``{protocol: url}`` dicts, or ``None`` when
        the file is missing/unreadable or does not contain valid JSON
        (matching the original silent-failure contract).
    """
    try:
        # `with` closes the file even if read()/parsing raises; utf-8 matches
        # the encoding spider_proxyip() writes with.
        with open(get_root_path() + '/data/proxyip.txt', 'r', encoding='utf-8') as file:
            data = file.read()
        return json.loads(data)
    # OSError covers missing/unreadable file; json.JSONDecodeError is a
    # ValueError subclass, so this no longer masks unrelated bugs the way
    # a bare `except Exception` did.
    except (OSError, ValueError) as e:
        print("read proxy fail")
        print(e)
        return None


if __name__ == '__main__':
    # Entry point is intentionally a no-op; the commented lines below show
    # how a manual scrape was previously triggered.
    pass
    # importlib.reload(sys)
    # spider_proxyip(40)
    # logger.info(proxys_src)
    # print("Finish Proxy")
