#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Project: spd-sxmcc
"""
@author: lyndon
@time Created on 2019/2/26 13:40
@desc
"""

import os
import requests
from bs4 import BeautifulSoup
from lxml import etree

default_conf = ""

def get_xici_ips(page=1):
    """Scrape one page of the xicidaili.com free-proxy listing.

    Args:
        page: 1-based page number of the listing to fetch (default 1).

    Returns:
        A list of strings of the form "ip:port:protocol", with the
        protocol lower-cased (e.g. "1.2.3.4:8080:http").

    Raises:
        requests.RequestException: on connection failure or timeout.
    """
    target_url = 'http://www.xicidaili.com/nn/%d' % page
    target_headers = {'Upgrade-Insecure-Requests': '1',
                      'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
                      'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
                      'Referer': 'http://www.xicidaili.com/nn/',
                      'Accept-Encoding': 'gzip, deflate, sdch',
                      'Accept-Language': 'zh-CN,zh;q=0.8',
                      }
    # A Session keeps cookies across requests automatically; the context
    # manager guarantees its connection pool is released.  The timeout
    # prevents the call from hanging forever on an unresponsive host.
    with requests.Session() as session:
        target_response = session.get(url=target_url, headers=target_headers,
                                      timeout=10)
    target_response.encoding = 'utf-8'
    soup = BeautifulSoup(target_response.text, 'lxml')
    # Re-parse just the #ip_list table so .table below resolves to it.
    table = BeautifulSoup(str(soup.find_all(id='ip_list')), 'lxml').table
    ip_list_info = []
    # The table's children alternate between whitespace text nodes (even
    # indices) and <tr> rows (odd indices); odd index 1 is the header row,
    # so only odd indices > 1 carry proxy entries.
    for index, row in enumerate(table.contents):
        if index % 2 == 1 and index != 1:
            dom = etree.HTML(str(row))
            ip = dom.xpath('//td[2]')
            port = dom.xpath('//td[3]')
            protocol = dom.xpath('//td[6]')
            ip_list_info.append(':'.join([ip[0].text, port[0].text,
                                          protocol[0].text.lower()]))
    return ip_list_info


def update_squid_conf(template_path=r'D:\iProject\myPython\com\teradata\squid\conf\squid.conf.example'):
    """Build the text of a squid.conf extended with scraped proxy peers.

    Reads the base template, disables the disk cache, then appends one
    ``cache_peer`` directive per proxy returned by :func:`get_xici_ips`.

    Args:
        template_path: path of the squid.conf template to extend.  Defaults
            to the original hard-coded location for backward compatibility.

    Returns:
        The complete configuration as a single string.

    Raises:
        OSError: if the template file cannot be read.
    """
    # Context manager closes the template handle even on error (the
    # previous version leaked the open file object).
    with open(template_path, 'r') as template:
        conf = template.read()
    conf += 'cache_dir null /tmp\n'
    for index, proxy_text in enumerate(get_xici_ips()):
        # Each entry is "ip:port:protocol"; only ip and port are needed here.
        proxy = proxy_text.split(':')
        conf += ("cache_peer " + proxy[0] + " parent " + proxy[1] +
                 " 0 no-query weighted-round-robin weight=2"
                 " connect-fail-limit=2 allow-miss max-conn=5 name=proxy-" +
                 str(index) + "\n")
    return conf

if __name__ == '__main__':
    # Regenerate the squid configuration and write it next to the template.
    default_conf = update_squid_conf()
    # Context manager ensures the handle is flushed and closed even if
    # the write raises (the previous open/close pair leaked on error).
    with open(r'D:\iProject\myPython\com\teradata\squid\conf\squid.conf', 'w') as conf:
        conf.write(default_conf)
    # Restart of the squid service is intentionally left disabled;
    # re-enable once the correct unit name is confirmed.
    # message = os.system('systemctl restart conf')
    # print(message)

