# -*- coding: utf-8 -*-

import time
import requests
import sys
import random
import redis
redis_db = redis.Redis(host='127.0.0.1', port=6379, db=0) #连接redis，相当于MySQL的conn
redis_proxy_key = 'crawl2018:proxie'
from urllib import request
from bs4 import BeautifulSoup as BS
import ssl

def get_random_proxy(title):
    """Find a working proxy from the Redis proxy pool.

    Fetches the proxy set from Redis, shuffles it, and tries each proxy at
    most once against a test URL. The first proxy that returns HTTP 200 is
    printed and returned; proxies that fail (bad status code OR a
    connection/timeout error) are removed from the Redis set.

    Args:
        title: search keyword — currently unused, kept for the commented-out
            Sogou WeChat search URL below.

    Returns:
        The first working proxy string (e.g. "http://1.2.3.4:8080"),
        or None if no proxy in the pool works.
    """
    # Decode once up front; smembers returns bytes.
    all_proxy = [p.decode('utf-8') for p in redis_db.smembers(redis_proxy_key)]
    # Shuffle and iterate — the original used random.choice() inside the loop,
    # which samples WITH replacement: the same dead proxy could be retried
    # while others were never tried, and removed proxies stayed in the list.
    random.shuffle(all_proxy)

    # sougou_wx_url = "http://weixin.sogou.com/weixin?type=2&s_from=input&ie=utf8&query="+title
    sougou_wx_url = "https://www.baidu.com/s?ie=utf-8&f=8&rsv_bp=1&tn=baidu&wd=ip地址查询"

    # Disable default certificate verification globally (loop-invariant,
    # so hoisted out of the loop). NOTE(review): this weakens TLS checking
    # process-wide — confirm this is intentional.
    ssl._create_default_https_context = ssl._create_unverified_context

    for proxy in all_proxy:
        scheme, _, address = proxy.partition('://')
        # Original behavior: always map the 'http' scheme to the bare
        # host:port part of the proxy string.
        proxies = {'http': address}

        try:
            response = requests.get(sougou_wx_url, proxies=proxies,
                                    timeout=2, verify=True)
        except requests.RequestException:
            # A dead proxy raises ConnectionError/Timeout; the original code
            # let this crash the whole script. Treat it like a bad status:
            # drop the proxy and move on.
            print('不合法ip %s' % proxy)
            redis_db.srem(redis_proxy_key, proxy.encode('utf-8'))
            continue

        if response.status_code == 200:
            html = response.text
            soup = BS(html, "lxml")

            print('合法ip %s' % proxy)
            print(soup.find(class_='c-gap-right'))
            # if 'antispider' in response.url:
            #     print('频繁访问，跳转了')
            #     print(response.url)

            print(response.text.encode('utf-8'))
            # Return instead of exit() so callers can use the proxy and the
            # interpreter isn't killed from inside a helper function.
            return proxy
        else:
            print('不合法ip %s' % proxy)
            redis_db.srem(redis_proxy_key, proxy.encode('utf-8'))

    return None

if __name__ == '__main__':
    # title = sys.argv[0]
    search_keyword = '新垣结衣'
    get_random_proxy(search_keyword)