#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2017-08-15 09:34:17
# Project: toutiao
#36379

### Toutiao (头条) spider — plat_id 6


import json
from pyspider.libs.base_handler import *
import hashlib
import time
import pyquery
import config as cf
import pymysql
import db_action

class Handler(BaseHandler):
    """pyspider handler that crawls Toutiao (plat_id 6) article feeds.

    Flow: ``on_start`` loads platform accounts from MySQL ->
    ``user_page`` resolves each account's user id via the search API ->
    ``detail_page`` pages through the user's article feed and stores
    every item via ``db_action.store_info``.
    """

    crawl_config = {

    }

    def __init__(self):
        # Keep one shared connection/cursor for the handler's lifetime.
        # NOTE(review): pymysql's default cursor yields tuples, but
        # on_start indexes rows by name (i['account_name']) -- this
        # presumably relies on cf.mysql_option configuring a DictCursor;
        # confirm against the config module.
        conn = pymysql.connect(**cf.mysql_option)
        cursor = conn.cursor()
        self.conn = conn
        self.cursor = cursor

    def md5_(self, key):
        """Return the lowercase hex MD5 digest of *key* (a str)."""
        md5 = hashlib.md5()
        md5.update(key.encode())
        return md5.hexdigest()

    def params_g(self):
        """Generate Toutiao's anti-spider ``as`` / ``cp`` query params.

        Mirrors the site's JS: interleaves characters of the current unix
        timestamp's uppercase hex form with the head/tail of the
        timestamp's MD5 digest.

        Returns:
            tuple[str, str]: the ``(as_, cp)`` parameter pair.
        """
        t = int(time.time())
        i = hex(t).replace("0x", '').upper()
        # An MD5 hexdigest contains only [0-9a-f], so the original
        # .replace("0x", '') and str() wrapper were dead code -- dropped.
        e = self.md5_(str(t)).upper()

        s = e[0:5]       # first 5 digest chars
        o = e[-5:]       # last 5 digest chars
        n = ''
        l = ''
        for x in range(5):
            n += s[x] + o[x]
        for j in range(5):
            l += i[j + 3] + o[j]
        as_ = 'A1' + n + i[-3:]
        cp = i[:3] + l + 'E1'
        return as_, cp

    @every(minutes=4 * 60)
    def on_start(self):
        """Entry point: schedule one search crawl per Toutiao account."""
        self.cursor.execute("select * from qp.med_plat_account WHERE plat_id = 6;")
        result = self.cursor.fetchall()
        for i in result:
            self.crawl('https://www.toutiao.com/search_content/',
                       callback=self.user_page,
                       validate_cert=False,
                       params={
                            "offset": 0,
                            "format": "json",
                            # account_name is "<prefix>-<keyword>"; search
                            # Toutiao by the trailing keyword segment.
                            "keyword": i['account_name'].split("-")[-1],
                            "count": 20,
                            "cur_tab": 4,
                            "autoload": True
                       }, save=i
                       )

    @config(age=60 * 60)
    def user_page(self, response):
        """Pick this account's search hit and schedule its feed crawl."""
        user_info = response.save
        # 'extend' selects which search-result row belongs to the account
        # (falls back to row 0 when unset/empty).
        extend = user_info['extend'] or 0
        data = json.loads(response.text)['data'][extend]
        # BUG FIX: the original computed user_id from source_url and then
        # immediately overwrote it with get_data_url (dead store). The
        # dead assignment is removed; effective behavior is unchanged.
        # NOTE(review): get_data_url looks like a URL rather than a bare
        # numeric id -- confirm the feed API accepts it as user_id.
        user_id = data['get_data_url']
        as_, cp = self.params_g()
        self.crawl('https://www.toutiao.com/c/user/article/',
                   callback=self.detail_page,
                   validate_cert=False,
                   params={
                        "page_type": 0,
                        "user_id": user_id,
                        "count": 200,
                        "as": as_,
                        "cp": cp
                   },
                   save={"user_id": user_id,
                         "as": as_,
                         "cp": cp,
                         "user_info": user_info
                         }
                   )

    @config(priority=2)
    def detail_page(self, response):
        """Store one page of articles; follow pagination while has_more."""
        data = json.loads(response.text)
        user_info = response.save['user_info']
        flow_info = data['data']
        for i in flow_info:
            i['plat_id'] = 6
            i['user_info'] = user_info
            db_action.store_info(i)
        # Robustness: .get() avoids a KeyError when the API omits
        # 'has_more'; the flow_info guard avoids IndexError on
        # flow_info[-1] for an empty page that still claims has_more.
        if data.get('has_more') and flow_info:
            params = response.save
            self.crawl('https://www.toutiao.com/c/user/article/',
                       callback=self.detail_page,
                       validate_cert=False,
                       params={
                            "page_type": 0,
                            "user_id": params['user_id'],
                            "count": 200,
                            "as": params["as"],
                            "cp": params['cp'],
                            # resume the feed after the oldest item seen
                            "max_behot_time": flow_info[-1]['behot_time']
                       },
                       save={"user_info": params['user_info'],
                             "as": params['as'],
                             "cp": params['cp'],
                             "user_id": params["user_id"]
                             }
                       )
        else:
            return {
                "url": response.url,
                "time": time.time()
            }
