# -*- coding: utf-8 -*-

import requests
from bs4 import BeautifulSoup
import lxml
from multiprocessing import Process, Queue
import random
import json
import time
import requests
import os
import redis
redis_db = redis.Redis(host='127.0.0.1', port=6379, db=0)  # connect to Redis (analogous to MySQL's conn object)
# Redis set key holding candidate proxies; entries look like 'scheme://host:port'
# (inferred from the '://' split below) — TODO confirm against the writer side.
redis_proxy_key = 'crawl2018:proxie'

def get_random_proxy():
    """Return a random working proxy from the Redis set, or None.

    Candidates are stored as ``scheme://host:port`` byte strings in the
    set ``redis_proxy_key``. They are shuffled and probed one by one by
    fetching a fast test URL through the proxy; the first proxy that
    answers with HTTP 200 is returned as a str. Returns None when no
    candidate validates.
    """
    all_proxy = list(redis_db.smembers(redis_proxy_key))
    # Shuffle so repeated calls don't always hammer/return the same proxy.
    random.shuffle(all_proxy)
    for proxy in all_proxy:
        proxy = proxy.decode('utf-8')
        # Split only on the first '://' so the address part stays intact.
        scheme, address = proxy.split('://', 1)
        # Probe the stored proxy itself (was a hard-coded test IP before).
        proxies = {scheme: address}
        try:
            resp = requests.get('http://www.baidu.com', proxies=proxies, timeout=2)
        except requests.RequestException:
            # Unreachable/timed-out proxy: report and try the next candidate
            # instead of crashing the whole scan.
            print('不合法ip %s' % proxy)
            continue
        if resp.status_code == 200:
            print('合法ip %s' % proxy)
            return proxy
        print('不合法ip %s' % proxy)
    return None

if __name__ == '__main__':
    # Script entry point: scan the stored proxies and report which validate.
    get_random_proxy()