#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import rsa
import time
import base64
import requests
from download.downloader import Downloader
from public.util import cookie_to_dic

import logging


class SycmDownloader(Downloader):
    """Downloader for sycm.taobao.com using a Redis-backed login cookie.

    A ``requests.Session`` is built lazily from a cookie string fetched from
    Redis — first from the list ``<name>_cookie_list`` (LPOP), then falling
    back to the hash field ``<name>_cookie_hash[<cookie_field>]``.  Three
    counters drive the cookie lifecycle after a request failure (see
    :meth:`except_deal`):

    * ``new_cookie`` — the next :meth:`get_session` must wait for a cookie
      *different* from the current one.
    * ``try_num``   — consecutive failures with the current cookie.
    * ``deal_num``  — requests served since the last failure; a high count
      means the cookie is probably still valid.
    """

    def __init__(self, config):
        super().__init__(config)
        self.hash_key = '{}_cookie_hash'.format(self.name)
        # Account password, RSA-encrypted per request to build the
        # 'transit-id' header.  NOTE(review): assumes the Redis client is
        # configured with decode_responses=True so hget returns str.
        self.password = self.rds.hget(self.hash_key, 'password').encode('utf-8')
        # Wrap the raw key material from Redis in PEM armor so that
        # rsa.PublicKey.load_pkcs1_openssl_pem() can parse it.
        rsa_begin = '-----BEGIN PUBLIC KEY-----'
        rsa_end = '-----END PUBLIC KEY-----'
        rsa_key = self.rds.hget(self.hash_key, 'pub_key')
        self.pub_key = '{}\n{}\n{}\n'.format(rsa_begin, rsa_key, rsa_end)
        self.cookie_field = os.getenv('ACCOUNT_NAME', 'cookie')
        self.cookie_queue = '{}_cookie_list'.format(self.name)
        self.session = None
        self.cookie = None
        # Whether the next session build must obtain a fresh cookie.
        self.new_cookie = True
        self.try_num = 0
        # Number of tasks handled; used to judge whether the cookie is
        # still usable after an exception.
        self.deal_num = 0

    def get_transit_id(self):
        """Return the base64-encoded RSA encryption of the account password.

        Used as the value of the ``transit-id`` request header.
        """
        pub_key = rsa.PublicKey.load_pkcs1_openssl_pem(self.pub_key)
        crypto = rsa.encrypt(self.password, pub_key)
        transit_id = base64.b64encode(crypto).decode("utf-8")
        return transit_id

    def get_headers(self, task):
        """Build per-request headers: a freshly encrypted ``transit-id``.

        ``task`` is unused here but kept for the Downloader interface.
        """
        headers = {
            'transit-id': self.get_transit_id(),
        }
        return headers

    def get_session(self, headers, task):
        """Return a ready-to-use session, creating one on demand.

        When no session exists and ``new_cookie`` is set, block (polling
        every 3 s) until Redis yields a cookie string different from the
        current one, then install it on a fresh ``requests.Session``.
        ``headers`` are merged into the session headers on every call.
        """
        if self.session is None:
            self.session = requests.Session()
            self.session.headers.update({
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) '
                              'Chrome/85.0.4183.121 Safari/537.36',
                'referer': 'https://sycm.taobao.com/mc/mq/search_analyze?activeKey=relation',
                'sycm-referer': '/mc/mq/search_analyze'
            })
            if self.new_cookie:
                while True:
                    # Prefer a queued cookie (LPOP) ...
                    cookie_str = self.rds.lpop(self.cookie_queue)
                    if cookie_str is None:
                        # ... otherwise fall back to the hash field.
                        cookie_str = self.rds.hget(self.hash_key, self.cookie_field)
                    if cookie_str == self.cookie:
                        # Same (presumably expired) cookie: wait for an
                        # external process to publish a new one.
                        logging.info('sleep 3 seconds for new cookie...')
                        time.sleep(3)
                        continue
                    self.cookie = cookie_str
                    break
                self.new_cookie = False
            cookie_dic = cookie_to_dic(self.cookie)
            for k, v in cookie_dic.items():
                self.session.cookies[k] = v
            cookie_dic = self.session.cookies.get_dict()
            logging.info('new session id [{}]'.format(cookie_dic.get('JSESSIONID', '')))
        self.session.headers.update(headers)
        return self.session

    def request(self, headers, proxies, task):
        """Perform the GET for ``task`` through the shared session.

        Increments ``deal_num`` so :meth:`except_deal` can judge cookie
        health.  Returns the ``requests.Response``.
        """
        s = self.get_session(headers, task)
        if proxies is None:
            response = s.get(task['url'], params=task['params'], timeout=self.time_out)
        else:
            response = s.get(task['url'], proxies=proxies, params=task['params'], timeout=self.time_out)
        self.deal_num += 1
        return response

    def except_deal(self, task):
        """Handle a request failure: decide whether to keep the cookie.

        More than 3 tasks handled since the last failure means the cookie
        is probably fine — keep it.  Otherwise count a failure; after more
        than 5 consecutive failures demand a fresh cookie, else sleep 180 s
        and retry with the same one.  The session is always discarded so it
        is rebuilt on the next request.
        """
        # Capture before any reset so the closing log line reports the real
        # count (the original logged self.deal_num AFTER zeroing it).
        handled = self.deal_num
        if self.deal_num > 3:
            self.deal_num = 0
            self.new_cookie = False
        else:
            self.try_num += 1
            if self.try_num > 5:
                # Too many consecutive failures: force a fresh cookie.
                self.new_cookie = True
                self.try_num = 0
            else:
                # Give the same cookie a few more chances; it may start
                # working again after a pause.
                for i in range(180):
                    if i % 10 == 0:
                        logging.info('sleep 180 to active cookie, {} seconds left...'.format(180-i))
                    time.sleep(1)
                self.new_cookie = False
        self.session = None
        logging.info('after deal {} words except occured, new_cookie {}'.format(handled, self.new_cookie))
