# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
import json
import random
from time import sleep

import requests
from scrapy import signals
from twisted.internet.defer import DeferredLock
from doubanscrapy.models import ProxyModel



# Request-header middleware: randomizes the User-Agent and attaches fixed Douban headers.
class UserAgentDownloadMiddleware(object):
    """Downloader middleware that disguises each outgoing request.

    Picks a random desktop-browser ``User-Agent`` from :data:`USER_AGENTS`
    and pins the ``Host``, ``Referer`` and ``Cookie`` headers expected by
    ``movie.douban.com`` so requests look like a logged-in browser session.
    """

    # Pool of real desktop browser identities; one is chosen per request.
    USER_AGENTS = [
        'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
        'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
        'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10'
    ]

    def process_request(self, request, spider):
        """Stamp the spoofed headers onto *request* before it is downloaded."""
        headers = (
            ('User-Agent', random.choice(self.USER_AGENTS)),
            ('Host', 'movie.douban.com'),
            ('Referer', 'https://movie.douban.com/tag/'),
            # NOTE(review): hard-coded session cookie — presumably a captured
            # login session; it will stop working once it expires.
            ('Cookie', 'bid=xErFR01YY8s; __utmc=30149280; __utmz=30149280.1615189533.2.2.utmcsr=google|utmccn=(organic)|utmcmd=organic|utmctr=(not%20provided); __utmc=223695111; __utmz=223695111.1616045867.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); __yadk_uid=SYYIfKVKBDwqLSzWK5u1wfp3kDv0XRtX; ll="118273"; _vwo_uuid_v2=D1F5F3E27FB6B7A60C9C2B12CB8839DD4|cfb8ece10d7dc67d3ece172db0e63ca5; dbcl2="234503618:DQJAxKb4eUY"; ck=t8ua; push_doumail_num=0; push_noty_num=0; ct=y; __utmv=30149280.23450; _pk_ses.100001.4cf6=*; ap_v=0,6.0; __utma=30149280.264434586.1615170259.1616056063.1616092954.6; __utma=223695111.434989378.1616045867.1616056063.1616092954.4; __utmb=223695111.0.10.1616092954; __utmt=1; __utmb=30149280.2.10.1616092954; __gads=ID=31e1693311797bf2:T=1615170260:S=ALNI_MZiaOBbJi54TZEh6suPMIKTAA7obA; _pk_id.100001.4cf6=8ba7934ac40936db.1616045867.4.1616092986.1616056070.'),
        )
        for name, value in headers:
            request.headers[name] = value

# Proxy middleware: rotates paid HTTP proxies from the zhima proxy API.
class IPProxyDownloadMiddleware(object):
    """Downloader middleware that routes requests through rotating proxies.

    A fresh proxy is fetched from :data:`PROXY_URL` whenever there is no
    current proxy, the current one is about to expire, or it has been
    blacklisted after a bad response.  ``self.current_proxy`` holds a
    ``ProxyModel`` (or ``None`` before the first successful fetch).
    """

    # Vendor API endpoint; returns JSON with a ``data`` list of proxy records.
    PROXY_URL = 'http://webapi.http.zhimacangku.com/getip?num=1&type=2&pro=&city=0&yys=0&port=11&time=1&ts=1&ys=0&cs=0&lb=1&sb=0&pb=45&mr=1&regions='

    def __init__(self):
        super(IPProxyDownloadMiddleware, self).__init__()
        self.current_proxy = None   # ProxyModel in use, or None until fetched
        self.lock = DeferredLock()  # serializes proxy refreshes across requests

    def process_request(self, request, spider):
        """Attach the current proxy to *request*, refreshing it if needed."""
        # Guard against current_proxy being None: the original code
        # dereferenced .is_expiring unconditionally, which raised
        # AttributeError whenever 'proxy' was already in request.meta
        # before a proxy had ever been obtained.
        if ('proxy' not in request.meta
                or self.current_proxy is None
                or self.current_proxy.is_expiring):
            # Fetch a replacement proxy from the vendor API.
            self.update_proxy()

        # update_proxy may fail to obtain a proxy (empty 'data'); only set
        # meta when we actually have one, instead of crashing on None.
        if self.current_proxy is not None:
            request.meta['proxy'] = self.current_proxy.proxy

    def process_response(self, request, response, spider):
        """Blacklist the proxy and retry the request on a bad response."""
        if response.status != 200 or "Fjob_detail" in response.url:
            # None guard: a failed refresh can leave current_proxy unset.
            if self.current_proxy is not None:
                if not self.current_proxy.blacked:
                    self.current_proxy.blacked = True
                print('%s这个代理被拉黑' % self.current_proxy.ip)
            self.update_proxy()

            # Returning the request re-schedules it; process_request will
            # then attach the freshly fetched proxy.
            return request

        return response

    def update_proxy(self):
        """Fetch a new proxy from the vendor API under the shared lock.

        The double check inside the lock avoids redundant API calls when
        several concurrent requests all noticed a stale proxy.
        """
        self.lock.acquire()
        try:
            if (not self.current_proxy
                    or self.current_proxy.is_expiring
                    or self.current_proxy.blacked):
                response = requests.get(self.PROXY_URL)
                text = response.text
                print('重新获取代理:', text)
                result = json.loads(text)
                # .get() tolerates an error payload without a 'data' key.
                data = result.get('data') or []
                if len(data) > 0:
                    self.current_proxy = ProxyModel(data[0])
        finally:
            # The original released the lock only on the success path; any
            # exception (network error, bad JSON) leaked the lock and
            # deadlocked every subsequent update_proxy call.
            self.lock.release()