#!/usr/bin/env python
# encoding: utf-8
"""
@author: 李彦军
@contact: 773582348@qq.com

@version: 1.0
@license: Apache Licence
@file: crawl_ip.py
@time: 21/01/12 下午3:06

"""

import requests #用requests库来做简单的网络请求
import random
import os
import time



# Absolute path of the flat-file IP pool (ip.txt), stored next to this script.
path = os.path.dirname(os.path.realpath(__file__)) + r'/ip.txt'

def clear_table():
    """Truncate the ip.txt pool file (the flat-file "table") to empty.

    Opening in 'w' mode truncates on open, so no explicit write is needed
    (the original opened 'w+' and wrote an empty string, both redundant).
    """
    with open(path, 'w'):
        pass








# Manager for a flat-file pool of HTTP proxy IPs (ip.txt at module-level `path`).
class IPUtil(object):
    def crawl_ip(self):
        """Fetch a batch of proxy "ip:port" entries from the provider API
        and append them to the pool file.

        On failure the provider answers with a payload containing a "msg"
        key; that raw payload is printed and nothing is stored.
        """
        headers = {"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:52.0) Gecko/20100101 Firefox/52.0"}
        ip_content = requests.get(
            url='http://http.tiqu.alibabaapi.com/getip?num=2&type=1&pack=62455&port=1&lb=1&pb=45&regions=',
            headers=headers).text
        # The presence of "msg" marks an error payload, not an IP list.
        if ip_content.find('msg') == -1:
            with open(path, 'a+') as f:
                f.writelines(ip_info + '\n' for ip_info in ip_content.split())
        else:
            print(ip_content)

    def get_random_ip(self):
        """Return a random proxy URL ("http://ip:port") from the pool.

        If the pool file is empty, a fresh batch is crawled first and the
        lookup retried (recursion depth grows if the crawl keeps failing —
        existing behavior, kept for compatibility).
        """
        with open(path, 'r') as f:
            ip_str = f.read()
        if ip_str == '':
            self.crawl_ip()
            return self.get_random_ip()
        random_ip = random.choice(ip_str.split())
        # Split the "ip:port" entry once instead of twice.
        ip, _, port = random_ip.partition(':')
        ip_type = 'http'
        return "{2}://{0}:{1}".format(ip, port, str(ip_type).lower())

    def judge_ip(self, ip, port, ip_type):
        """Check whether a proxy works by fetching a test page through it.

        Returns True on an HTTP 2xx response; otherwise removes the entry
        from the pool file and returns False.
        """
        print('开始判断', ip, port, ip_type)
        http_url = "http://news.sohu.com/"
        proxy_url = "{2}://{0}:{1}".format(ip, port, str(ip_type).lower())
        proxy_dict = {
            "http": proxy_url,
        }
        try:
            response = requests.get(http_url, proxies=proxy_dict, timeout=5)
        except requests.RequestException:
            # Narrowed from bare `Exception`: only transport-level failures
            # mean the proxy is dead; programming errors should surface.
            print('无效的ip和端口，无法连接百度')
            self.delete_ip(ip + ':' + port)
            return False
        else:
            code = response.status_code
            if 200 <= code < 300:
                print('有效的ip' + str(code))
                return True
            else:
                print('无效的ip和端口，代码是' + str(code))
                self.delete_ip(ip + ':' + port)
                return False

    def delete_ip(self, ip):
        """Remove the given "ip:port" entry from the pool file.

        The file is rewritten without the entry. Always returns True
        (kept from the original interface).
        """
        with open(path, 'r') as f:
            print(ip)
            ip_list = f.read().split()
            # After the membership check remove() cannot raise, so the
            # original's try/except here was dead code and is dropped.
            if ip in ip_list:
                ip_list.remove(ip)
            print(ip_list)

        with open(path, 'w') as f:
            f.writelines(v + '\n' for v in ip_list)
        print('删除成功')
        return True

    def clear_ip(self):
        """Validate every pooled proxy, deleting the ones that fail.

        Crawls a fresh batch first when the pool is empty.
        """
        with open(path, 'r') as f:
            ip_str = f.read()
        if ip_str == '':
            self.crawl_ip()
            # Fix: re-read the file so the freshly crawled IPs are actually
            # validated (the original looped over the stale empty snapshot,
            # making the whole call a no-op on an empty pool).
            with open(path, 'r') as f:
                ip_str = f.read()
        for entry in ip_str.split():
            ip, _, port = entry.partition(':')
            if not self.judge_ip(ip, port, 'http'):
                self.delete_ip(entry)


if __name__ == '__main__':
    # Manual smoke test: pull one random proxy from the pool and show it.
    util = IPUtil()
    print(util.get_random_ip())