# -*- coding: UTF-8 -*-
import datetime
import json

import logging
import pymysql
import scrapy
from scrapy.spiders import Spider
import time
import re

class WeiboBigVSpider(Spider):
    """Scrape Weibo's "Big V" (verified celebrity) listing API and upsert
    each account into MySQL.

    Flow: read login cookies/UA from table ``weibo_login_wap`` (retrying
    until a row exists), then walk the m.weibo.cn container ``231074``
    listing, tab 1..10, page by page while the API keeps answering
    ``ok=1``.  Each user card is stored in ``weibo_account_bigv``.
    """
    name = "WeiboBigVSpider"
    allowed_domains = ["sina.com.cn", "weibo.com", 'passport.weibo.com', 'weibo.cn']
    # Shared runtime state.  The ``global cfg`` statement promotes the dict
    # to module level so every method below can reference it as a bare name.
    global cfg
    cfg = {'db': None, 'cursor': None, 'userId': '', 'userPwd': '', 'cookiejar': '', 'cookies': '', 'UA': '', 'ip': '',
           'index': 0}

    def _connect_db(self):
        """Open a fresh connection to the ``spider`` MySQL database.

        NOTE(review): credentials are hard-coded; they belong in Scrapy
        settings or environment variables.
        """
        return pymysql.connect(host="58b9119f72fd0.bj.cdb.myqcloud.com",
                               port=13244,
                               user="cdb_outerroot",
                               password="tutu123123",
                               db="spider",
                               charset='utf8')

    def _make_request(self, temp_url, tab_index, page):
        """Build the listing request for (tab_index, page), attaching the
        stored cookies and User-Agent."""
        current_url = temp_url + str(tab_index) + "&page=" + str(page)
        request = scrapy.http.Request(current_url,
                                      cookies=self.get_dict_cookie(),
                                      meta={'cookiejar': cfg['cookies'],
                                            'user_id': cfg['userId'],
                                            'current_url': current_url,
                                            'temp_url': temp_url,
                                            'tab_index': str(tab_index),
                                            'page': str(page),
                                            'UA': cfg['UA']},
                                      callback=self.fitHtml)
        request.headers.setdefault('User-Agent', cfg['UA'])
        return request

    def start_requests(self):
        """Load credentials from MySQL (retry every 5 s until available),
        then issue the first request for tab 1, page 1."""
        self.log("--------WeiboBigVSpider 开启程序--------", logging.INFO)
        cfg['db'] = self._connect_db()
        while True:
            self.log("--------获取用户名密码和IP信息--------", logging.INFO)
            cursor = cfg['db'].cursor()
            sql = "select id, user_id, user_pwd, user_cookie, UA, ip from weibo_login_wap where id =10"
            cursor.execute(sql)
            self.log("获取用户名密码和IP信息sql:"+sql,logging.INFO)
            results = cursor.fetchall()
            if len(results) > 0:
                cfg['userId'] = str(results[0][1])
                cfg['userPwd'] = str(results[0][2])
                cfg['UA'] = str(results[0][4])
                cfg['ip'] = str(results[0][5])
                cfg['cookies'] = str(results[0][3])
                self.log("------用户名密码UA\IP获取成功-------userid:" + cfg['userId'], logging.INFO)
                cursor.close()
                break
            else:
                self.log("--无可用用户名密码5秒后重试", logging.INFO)
                cursor.close()
                time.sleep(5)
                continue
        if cfg['userId'] != '':
            self.log('------递归处理account------', logging.INFO)
            temp_url = "http://m.weibo.cn/api/container/getIndex?containerid=231074_-_Headuser_-_"
            yield self._make_request(temp_url, 1, 1)

    def _parse_card(self, card):
        """Map one API card's ``user`` payload onto a BigVUser record.

        Double quotes in free-text fields are normalized to single quotes,
        matching what earlier runs stored in the database.
        """
        user = card['user']
        b = BigVUser()
        b.uid = user['id']
        b.name = str(user['screen_name']).replace('"', "'")
        b.headimg = user['profile_image_url']
        b.homeurl = user['profile_url']
        b.verified = str(user['verified'])
        b.verified_type = str(user['verified_type'])
        if b.verified == 'True':
            # These fields can be absent even for verified users; fall back
            # to '' instead of raising KeyError and losing the whole page.
            b.verified_type_ext = str(user.get('verified_type_ext', ''))
            b.verified_reason = str(user.get('verified_reason', ''))
        b.description = str(user['description']).replace('"', "'")
        b.sex = user['gender']
        b.guanzhu = str(user['follow_count'])
        b.fensi = str(user['followers_count'])
        return b

    def fitHtml(self, response):
        """Parse one listing page: upsert every account card, then schedule
        the next page of the same tab while the API answers ``ok=1``, or
        move on to the next tab (up to 10) once it stops."""
        page = int(response.meta['page'])
        tab_index = int(response.meta['tab_index'])
        if response.status == 200:
            self.log("----------------------fitHtml----------------------" + time.strftime('%Y-%m-%d %X', time.localtime()), logging.INFO)
            current_url = str(response.meta['current_url'])
            self.log("------------------------url:" + current_url, logging.INFO)
            bodystr = str(response.body, "utf-8")
            ok = 0
            if bodystr != "":
                hjson = json.loads(bodystr)
                if 'ok' in hjson and 'cards' in hjson:
                    ok = int(hjson['ok'])
                    cards = hjson['cards']
                    if len(cards) > 0 and len(cards[0]['card_group']) > 0:
                        for card in cards[0]['card_group']:
                            self.updateAccountBigV(self._parse_card(card))
            temp_url = str(response.meta['temp_url'])
            if ok:
                # Same tab, next page.
                yield self._make_request(temp_url, tab_index, page + 1)
            elif tab_index < 10:
                # API exhausted this tab: restart at page 1 of the next tab.
                yield self._make_request(temp_url, tab_index + 1, 1)
        else:
            self.log("HttpCode不为200.IP被封禁", logging.INFO)
            self.log(str(response.status), logging.INFO)
            self.log("等待10分钟,退出spider", logging.INFO)
            self.log('userId:'+ cfg['userId'])
            time.sleep(600)

    def updateAccountBigV(self, b):
        """Insert or update one BigVUser row, retrying forever on DB errors
        and reconnecting after each failure.

        Uses parameterized queries: the previous string-formatted SQL was
        injectable through user-controlled profile fields (name, description).
        """
        while True:
            try:
                cursor = cfg['db'].cursor()
                cursor.execute("select * from weibo_account_bigv where uid = %s", (b.uid,))
                if cursor.fetchall():
                    sql1 = ("UPDATE weibo_account_bigv SET name = %s, allname = %s,"
                            " headimg = %s, verified = %s, verified_type = %s,"
                            " verified_type_ext = %s, sex = %s, verified_reason = %s,"
                            " homeurl = %s, description = %s, guanzhu = %s, fensi = %s"
                            " WHERE uid = %s")
                    params = (b.name, b.allname, b.headimg, b.verified,
                              b.verified_type, b.verified_type_ext, b.sex,
                              b.verified_reason, b.homeurl, b.description,
                              b.guanzhu, b.fensi, b.uid)
                else:
                    sql1 = ("INSERT INTO weibo_account_bigv (name, allname, headimg,"
                            " verified, verified_type, verified_type_ext, sex,"
                            " verified_reason, homeurl, uid, description, guanzhu, fensi)"
                            " VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
                    params = (b.name, b.allname, b.headimg, b.verified,
                              b.verified_type, b.verified_type_ext, b.sex,
                              b.verified_reason, b.homeurl, b.uid,
                              b.description, b.guanzhu, b.fensi)
                cursor.execute(sql1, params)
                cfg['db'].commit()
                cursor.close()
                break
            except Exception as e:
                self.log(str(e), logging.ERROR)
                time.sleep(10)
                # Best-effort close + reconnect before retrying the upsert.
                try:
                    cfg['db'].close()
                except Exception as e:
                    print(e)
                try:
                    cfg['db'] = self._connect_db()
                except Exception as e:
                    print(e)

    def parse(self, response):
        """Default Scrapy callback; unused — every request routes to fitHtml."""
        self.log("insert parse",logging.INFO)

    def close(spider, reason):
        """Scrapy shutdown hook: release the shared DB connection."""
        cfg['db'].close()
        print("--------close db--------")

    def get_dict_cookie(self):
        """Parse the stored CookieJar repr into a ``{name: value}`` dict.

        Bug fix: the pattern previously searched for lowercase ``<cookie``
        while the stored repr uses ``<Cookie`` (regex matching is
        case-sensitive), so it always returned an empty dict and follow-up
        requests were sent without cookies.
        """
        pattern = re.compile(r'<Cookie (.*?) for \.weibo\.cn/>')
        pairs = pattern.findall(cfg['cookies'])
        # Split on the first '=' only so cookie values containing '=' survive.
        return dict(pair.split('=', 1) for pair in pairs)

class BigVUser:
    """Mutable record for one Weibo verified ("Big V") account.

    Every field is a string (or the raw API value assigned by the caller)
    and defaults to ''.  Instances are populated attribute-by-attribute
    while parsing an API card, then written to ``weibo_account_bigv``.
    Fields are now instance attributes (set in ``__init__``) instead of
    shared class attributes, so two instances can never alias state.
    """

    def __init__(self):
        # Identity / profile links
        self.uid = ''
        self.name = ''
        self.allname = ''
        self.headimg = ''
        self.homeurl = ''
        # Verification status ('True'/'False') and its details
        self.verified = ''
        self.verified_type = ''
        self.verified_type_ext = ''
        self.verified_reason = ''
        # Free-text profile fields
        self.description = ''
        self.sex = ''
        # Counters (kept as strings, as the DB layer expects)
        self.guanzhu = ''  # accounts this user follows
        self.fensi = ''    # followers of this user

    def __repr__(self):
        return f"BigVUser(uid={self.uid!r}, name={self.name!r})"
