# -*- coding: utf-8 -*-
import pymysql
import redis
import time
from weibo_spiders import settings
from .utils.mid_trans import url_to_mid
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html


class AdvSearchPipeline(object):
    """Persist advanced-search weibo items into a per-keyword MySQL table
    and seed the `findSon` spider's start URLs in Redis.

    Table name is ``<keyword with spaces replaced by _>_<spider.name>``.
    """

    def __init__(self):
        # Shared Redis connection: publishes the current keyword and
        # enqueues detail-page URLs for the downstream findSon spider.
        redis_pool = redis.ConnectionPool(
            host=settings.REDIS_HOST, port=settings.REDIS_PORT, password='')
        self.r = redis.Redis(connection_pool=redis_pool)

    def open_spider(self, spider):
        """Record the keyword in Redis, open MySQL, and create the
        destination table if it does not exist yet."""
        self.kw = spider.kw
        self.r.set('CURRENT_KEY', self.kw)
        # Compute the table name once; spaces are invalid in identifiers.
        # (Previously recomputed on every item in process_item.)
        self.table = self.kw.replace(" ", "_") + '_' + spider.name
        self.conn = pymysql.connect(host=settings.MYSQL_HOST, user=settings.MYSQL_USER,
                                    passwd=settings.MYSQL_PASSWD, db=settings.MYSQL_DBNAME, charset='utf8')
        self.cur = self.conn.cursor()
        # NOTE(review): identifiers cannot be bound as parameters, so the
        # table name is interpolated; it is backtick-quoted, but `kw`
        # should still come from a trusted source.
        sql = '''CREATE TABLE IF NOT EXISTS `zhifou`.`{}`  (
                `mid_str` varchar(255) NOT NULL,
                `mid` varchar(255) NOT NULL,
                `user_url` varchar(255) NULL,
                `user_name` varchar(255) NULL,
                `verified_type` int(10) NULL,
                `text` varchar(2000) NULL,
                `source` varchar(255) NULL,
                `reposts_count` int(10) NULL,
                `comments_count` int(10) NULL,
                `attitudes_count` int(10) NULL,
                `created_at` timestamp(0) NULL,
                `crawl_time` timestamp(0) NULL,
                `has_show` int(1) DEFAULT 0,
                PRIMARY KEY (`mid_str`)
                );'''.format(self.table)
        self.cur.execute(sql)
        self.conn.commit()

    def close_spider(self, spider):
        """Release the MySQL cursor and connection (previously leaked)."""
        try:
            self.cur.close()
        finally:
            self.conn.close()

    def process_item(self, item, spider):
        """Insert one weibo item (duplicates ignored via the primary key)
        and push its detail URL into the findSon start-URL set."""
        mid_str = item.get("mid_str")
        mid = url_to_mid(mid_str)
        user_url = item.get("user_url")
        user_name = item.get("user_name")
        verified_type = item.get("verified_type")
        text = item.get("text")
        source = item.get("source")
        created_at = item.get("created_at")
        reposts_count = item.get("reposts_count")
        comments_count = item.get("comments_count")
        attitudes_count = item.get("attitudes_count")

        # Record when we scraped the item, in MySQL TIMESTAMP format.
        crawl_time = time.strftime(
            '%Y-%m-%d %H:%M:%S', time.localtime())

        # Table name backtick-quoted for consistency with CREATE TABLE;
        # all values are bound as parameters.
        sql = '''insert ignore into `{}`(
                            mid_str, mid, user_url, user_name, verified_type,
                             text, source, reposts_count, comments_count,
                              attitudes_count, created_at, crawl_time)
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'''.format(self.table)
        self.cur.execute(sql, (mid_str, mid, user_url, user_name, verified_type,
                               text, source, reposts_count, comments_count,
                               attitudes_count, created_at, crawl_time))
        self.conn.commit()
        self.r.sadd('findSon:start_urls',
                    'https://m.weibo.cn/detail/{}'.format(mid))
        return item


class FindSonPipeline(object):
    """Persist repost-tree ("son") items into a per-keyword MySQL table and
    seed the userSpider start URLs in Redis.

    Reads the current keyword from the Redis key ``CURRENT_KEY`` set by
    AdvSearchPipeline.
    """

    def __init__(self):
        # Shared Redis connection: reads the active keyword and enqueues
        # user-profile URLs for the downstream userSpider.
        redis_pool = redis.ConnectionPool(
            host=settings.REDIS_HOST, port=settings.REDIS_PORT, password='')
        self.r = redis.Redis(connection_pool=redis_pool)

    def open_spider(self, spider):
        """Fetch the active keyword, open MySQL, and create the
        destination table if it does not exist yet."""
        # NOTE(review): if CURRENT_KEY is unset this raises TypeError
        # (bytes.decode(None)); assumes AdvSearchPipeline ran first.
        self.kw = bytes.decode(self.r.get('CURRENT_KEY'))
        # Sanitize spaces the same way AdvSearchPipeline does, so both
        # pipelines derive the same table name for a multi-word keyword.
        self.table = self.kw.replace(" ", "_") + '_' + spider.name
        self.conn = pymysql.connect(host=settings.MYSQL_HOST, user=settings.MYSQL_USER,
                                    passwd=settings.MYSQL_PASSWD, db=settings.MYSQL_DBNAME, charset='utf8')
        self.cur = self.conn.cursor()
        sql = '''CREATE TABLE IF NOT EXISTS `zhifou`.`{}`  (
                `mid` varchar(255) NOT NULL,
                `pid` varchar(255) NULL,
                `rootknot` varchar(255) NULL,
                `userid` varchar(255) NULL,
                `verified_type` int(10) NULL,
                `text` varchar(2000) NULL,
                `source` varchar(255) NULL,
                `reposts_count` int(10) NULL,
                `comments_count` int(10) NULL,
                `attitudes_count` int(10) NULL,
                `created_at` timestamp(0) NULL,
                `followers_count` int(10) NULL,
                `follow_count` int(10) NULL,
                `generation` int(10) NULL,
                `sons` int(10) NULL,
                PRIMARY KEY (`mid`)
                );'''.format(self.table)
        self.cur.execute(sql)
        self.conn.commit()

    def close_spider(self, spider):
        """Release the MySQL cursor and connection (previously leaked)."""
        try:
            self.cur.close()
        finally:
            self.conn.close()

    def process_item(self, item, spider):
        """Insert one repost-tree node (duplicates ignored via the primary
        key) and push its author's profile URL for the userSpider."""
        mid = item.get("mid")
        pid = item.get("pid")
        rootknot = item.get("rootknot")
        userid = item.get("userid")
        verified_type = item.get("verified_type")
        text = item.get("text")
        source = item.get("source")
        created_at = item.get("created_at")
        reposts_count = item.get("reposts_count")
        comments_count = item.get("comments_count")
        attitudes_count = item.get("attitudes_count")
        follow_count = item.get("follow_count")
        followers_count = item.get("followers_count")
        generation = item.get("generation")

        # Table name backtick-quoted for consistency with CREATE TABLE;
        # all values are bound as parameters.
        sql = '''insert ignore into `{}`(
                            mid, pid, rootknot, userid, verified_type,
                             text, source, reposts_count, comments_count,
                              attitudes_count, created_at, generation,
                               follow_count, followers_count)
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'''.format(self.table)
        self.cur.execute(sql, (mid, pid, rootknot, userid, verified_type,
                               text, source, reposts_count, comments_count,
                               attitudes_count, created_at, generation,
                               follow_count, followers_count))
        self.conn.commit()
        self.r.sadd('userSpider:start_urls',
                    'https://weibo.cn/{}/info'.format(userid))
        return item


class UserSpiderPipeline(object):
    """Persist user-profile items into a per-keyword MySQL table.

    Reads the current keyword from the Redis key ``CURRENT_KEY`` set by
    AdvSearchPipeline.
    """

    def open_spider(self, spider):
        """Connect to Redis and MySQL, fetch the active keyword, and
        create the destination table if it does not exist yet."""
        redis_pool = redis.ConnectionPool(
            host=settings.REDIS_HOST, port=settings.REDIS_PORT, password='')
        self.r = redis.Redis(connection_pool=redis_pool)
        # NOTE(review): if CURRENT_KEY is unset this raises TypeError
        # (bytes.decode(None)); assumes AdvSearchPipeline ran first.
        self.kw = bytes.decode(self.r.get('CURRENT_KEY'))
        # Sanitize spaces the same way AdvSearchPipeline does, so all
        # pipelines derive matching table names for a multi-word keyword.
        self.table = self.kw.replace(" ", "_") + '_' + spider.name
        self.conn = pymysql.connect(host=settings.MYSQL_HOST, user=settings.MYSQL_USER,
                                    passwd=settings.MYSQL_PASSWD, db=settings.MYSQL_DBNAME, charset='utf8')
        self.cur = self.conn.cursor()
        sql = '''CREATE TABLE IF NOT EXISTS `zhifou`.`{}`  (
                        `uid` varchar (255) NOT NULL,
                        `name` varchar(255) NULL,
                        `gender` varchar(255) NULL,
                        `location` varchar(255) NULL,
                        `birthday` varchar(255) NULL,
                        `identification` varchar(255) NULL,
                        `description` varchar(255) NULL,
                        PRIMARY KEY (`uid`)
                        );'''.format(self.table)
        self.cur.execute(sql)
        self.conn.commit()

    def close_spider(self, spider):
        """Release the MySQL cursor and connection (previously leaked)."""
        try:
            self.cur.close()
        finally:
            self.conn.close()

    def process_item(self, item, spider):
        """Insert one user profile; duplicates ignored via the `uid`
        primary key."""
        uid = item.get("user_id")
        name = item.get("user_name")
        gender = item.get("user_gender")
        location = item.get("user_location")
        birthday = item.get("user_birthday")
        identification = item.get("user_identification")
        description = item.get("user_description")

        # Table name backtick-quoted for consistency with CREATE TABLE;
        # all values are bound as parameters.
        sql = '''insert ignore into `{}`(
                                    uid, name, gender, location, birthday,
                                     identification, description)
                            VALUES (%s, %s, %s, %s, %s, %s, %s)'''.format(self.table)
        self.cur.execute(sql, (uid, name, gender, location,
                               birthday, identification, description))
        self.conn.commit()
        return item
