from fake_useragent import UserAgent
from scrapy.http import Response
from scrapy.conf import settings
import random
import pymysql
from scrapy.conf import settings
import requests

# Defining a middleware is simply defining a class.
class RandomUserAgent(object):
    """Downloader middleware that rotates the User-Agent header.

    A fresh random User-Agent string is attached to every outgoing
    request so the crawler is harder to fingerprint as a bot.
    """

    def __init__(self):
        # fake_useragent provides a pool of real browser UA strings
        self.ua = UserAgent()

    def process_request(self, request, spider):
        """Pick a random User-Agent and set it on the request headers."""
        user_agent = self.ua.random
        request.headers['User-Agent'] = user_agent
        # print(request.headers)
        # print(request.headers)


# Random-proxy middleware mixing free and authenticated proxies (database-backed version).
class RandomProxyMysql(object):
    """Downloader middleware that assigns an HTTP proxy to each request.

    Proxies come either from a paid proxy API (``getdaili``) or from a
    local MySQL table (``random_proxy``).  A static address is kept as a
    fallback when the API yields nothing usable.
    """

    def __init__(self):
        # NOTE(review): credentials are hard-coded; consider moving them into
        # Scrapy settings.  The connection is held for the middleware's whole
        # lifetime and is never explicitly closed — confirm that is intended.
        # Keyword arguments are required by PyMySQL 1.0+ (positional
        # host/user/password were removed from connect()).
        self.conn = pymysql.connect(
            host='127.0.0.1',
            user='root',
            password='py09hu',
            database='py09charm',
            charset='utf8',
        )
        self.cursor = self.conn.cursor()

    def getdaili(self):
        """Fetch a batch of proxies from the paid API and return the first one.

        Returns:
            str or None: a proxy URL of the form ``http://ip:port``, or
            ``None`` when the API returned no usable lines.
        """
        vipurl = 'http://api.ip.data5u.com/api/get.shtml?order=b520d7549dcaa15e3f489069c137278b&num=100&area=%E4%B8%AD%E5%9B%BD&carrier=0&protocol=0&an1=1&an2=2&an3=3&sp1=1&sp2=2&sp3=3&sort=1&system=1&distinct=0&rettype=1&seprator=%0D%0A'
        # A timeout keeps an unresponsive proxy API from stalling the crawl.
        res = requests.get(vipurl, timeout=10)
        res.encoding = res.apparent_encoding
        # The API separates entries with CRLF; ignore blank lines so an
        # empty response cannot produce a bogus 'http://' proxy.
        lines = [line.strip() for line in res.text.split('\r\n') if line.strip()]
        if not lines:
            return None
        return 'http://%s' % lines[0]

    # Executed before the request is sent.
    def process_request(self, request, spider):
        """Attach a proxy to the outgoing request.

        Bug fix: the proxy fetched from the API was previously discarded in
        favour of a hard-coded address; the fetched proxy is now actually
        used, with the old static address kept only as a fallback.
        """
        proxy = self.getdaili()
        if proxy:
            request.meta['proxy'] = proxy
        else:
            # Fallback to the previously hard-coded proxy.
            request.meta['proxy'] = 'http://39.105.78.30:3128'

    # Executed after the response is received.
    def process_response(self, request, response, spider):
        """Log the response status and pass the response through unchanged."""
        print(response.status)
        return response

    def random_proxy(self):
        """Return one random proxy row from the ``py09_ip`` MySQL table."""
        sql = 'select * from py09_ip ORDER BY rand() limit 1'
        self.cursor.execute(sql)
        proxy = self.cursor.fetchone()
        return proxy




