# -*- coding:utf-8 _*-
"""
@author:Zhang Yafei
@time: 2019/12/02
"""
import re
import time
from functools import wraps

import pandas as pd
import requests
from DBUtils.PooledDB import PooledDB
from redis import ConnectionPool, Redis
from scrapy.utils.project import get_project_settings
from sqlalchemy import create_engine

from dingxiangyuan import settings


def timeit(func):
    """Decorator: measure and print a function's wall-clock run time.

    Prints seconds when the call takes under a minute, otherwise
    minutes and seconds, then returns the wrapped function's result
    unchanged.

    :param func: the function to time
    :return: the wrapped function
    """

    @wraps(func)
    def inner(*args, **kwargs):
        start = time.time()
        ret = func(*args, **kwargs)
        elapsed = time.time() - start
        if elapsed < 60:
            print(f'花费时间：\t{round(elapsed, 2)}秒')
        else:
            # renamed from `min`/`sec` to avoid shadowing the builtin min()
            minutes, seconds = divmod(elapsed, 60)
            print(f'花费时间\t{round(minutes)}分\t{round(seconds, 2)}秒')
        return ret

    return inner


class DBPoolHelper(object):
    """Convenience wrapper around a DBUtils ``PooledDB`` connection.

    Opens one pooled connection plus a cursor on construction and exposes
    small execute/fetch helpers.  NOTE: ``execute`` and ``execute_many``
    do NOT auto-commit unless stated; use ``execute_with_commit`` or call
    ``commit`` yourself for writes.
    """

    def __init__(self, dbname, user=None, password=None, db_type='postgressql', host='localhost', port='5432'):
        """Create a pooled connection for the chosen backend.

        :param dbname: database name (file path for sqlite)
        :param user: database user (ignored for sqlite)
        :param password: database password (ignored for sqlite)
        :param db_type: 'postgressql'/'postgresql', 'mysql' or 'sqlite'
        :param host: database host (ignored for sqlite)
        :param port: database port as str or int; pass 3306 for mysql
        :raises Exception: for an unknown ``db_type``

        Reference pool setups:
            # sqlite3 (no user/password, not encrypted)
            pool = PooledDB(sqlite3, maxcached=50, maxconnections=1000, maxusage=1000, database='path/to/dbname.db')
            # mysql — 5 is the minimum number of pooled connections
            pool = PooledDB(pymysql, 5, host=host, user=user, passwd=password, db=dbname, port=3306)
            # postgresql
            pool = PooledDB(creator=psycopg2, host=host, port=port, user=user, password=password, database=dbname)
            # sqlserver
            pool = PooledDB(creator=pymssql, host=host, port=port, user=user, password=password, database=dbname, charset="utf8")
        """
        # accept both the historical misspelling and the correct spelling
        if db_type in ('postgressql', 'postgresql'):
            import psycopg2
            pool = PooledDB(creator=psycopg2, host=host, port=port, user=user, password=password, database=dbname)
        elif db_type == 'mysql':
            import pymysql
            # was hard-coded to localhost/root/pwd/myDB, ignoring the
            # constructor arguments entirely — now uses what the caller passed
            pool = PooledDB(pymysql, 5, host=host, user=user, passwd=password, db=dbname,
                            port=int(port))  # 5 = minimum pooled connections; mysql default port is 3306
        elif db_type == 'sqlite':
            import sqlite3
            # keyword was misspelled "datanase", which sqlite3.connect rejects
            pool = PooledDB(sqlite3, maxcached=50, maxconnections=1000, maxusage=1000, database=dbname)
        else:
            raise Exception('请输入正确的数据库类型, db_type="postgresql" or db_type="mysql" or db_type="sqlite"')
        self.conn = pool.connection()
        self.cursor = self.conn.cursor()

    def connect_close(self):
        """Close the cursor and release the pooled connection."""
        self.cursor.close()
        self.conn.close()

    def commit(self):
        """Commit the current transaction."""
        self.conn.commit()

    def execute(self, sql, params=tuple()):
        """Execute ``sql`` with ``params``; does NOT commit."""
        self.cursor.execute(sql, params)

    def execute_with_commit(self, sql, params=tuple()):
        """Execute ``sql`` with ``params`` and commit immediately."""
        self.cursor.execute(sql, params)
        self.conn.commit()

    def execute_many(self, sql, params=tuple()):
        """Run ``executemany`` over ``params`` and commit."""
        self.cursor.executemany(sql, params)
        self.conn.commit()

    def fetchone(self, sql, params=tuple()):
        """Execute a query and return the first row (or None)."""
        self.cursor.execute(sql, params)
        return self.cursor.fetchone()

    def fetchall(self, sql, params=tuple()):
        """Execute a query and return all rows."""
        self.cursor.execute(sql, params)
        return self.cursor.fetchall()

    def __del__(self):
        # __init__ may have raised before self.conn/self.cursor existed;
        # never let destruction propagate an exception
        try:
            self.connect_close()
        except Exception:
            pass


def pandas_db_helper():
    """Open and return a SQLAlchemy connection for pandas to use.

    The engine URL comes from the project's ``settings.DATABASE_ENGINE``.
    Example URLs:
        'postgresql://postgres:0000@127.0.0.1:5432/xiaomuchong'
        "mysql+pymysql://root:0000@127.0.0.1:3306/srld?charset=utf8mb4"
        "sqlite: ///sqlite3.db"
    """
    return create_engine(settings.DATABASE_ENGINE).connect()


def redis_init():
    """Build a Redis client from the scrapy project settings.

    Uses REDIS_HOST/REDIS_PORT, plus the password from REDIS_PARAMS
    when that setting is non-empty.
    """
    cfg = get_project_settings()  # local name avoids shadowing the imported `settings` module
    pool_kwargs = {'host': cfg["REDIS_HOST"], 'port': cfg["REDIS_PORT"]}
    if cfg["REDIS_PARAMS"]:
        pool_kwargs['password'] = cfg["REDIS_PARAMS"]['password']
    return Redis(connection_pool=ConnectionPool(**pool_kwargs))


# Module-level shared connections used by every helper below.
redis_conn = redis_init()
db_conn = pandas_db_helper()


def cal_page_url(row):
    """Add every page URL of one topic to the redis set 'topic_page_urls'.

    row[0] is the topic URL, row[1] its reply count.  Page 1 is the bare
    topic URL; pages 2..N carry a ``?ppg=`` suffix.  Assumes 35 replies
    per page — TODO confirm against the site.
    """
    topic_url = row[0]
    total_pages = row[1] // 35 + 1
    redis_conn.sadd('topic_page_urls', topic_url)
    for ppg in range(2, total_pages + 1):
        redis_conn.sadd('topic_page_urls', f'{topic_url}?ppg={ppg}')
    print(topic_url)


def insert_redis_topic_page_urls():
    """Read topic URL / reply count pairs from the 'topics' table and
    enqueue each topic's page URLs into redis via cal_page_url."""
    topics = pd.read_sql(sql="topics", con=db_conn, columns=["topic_url", "reply_num"])
    topics.apply(cal_page_url, axis=1)


def insert_into_topic_rate():
    """Insert a zero-rate row into topic_rate_get for every first-floor
    topic not already present in that table."""
    postgres = DBPoolHelper(db_type='postgressql', dbname='dingxiangyuan', user='postgres', password='0000',
                            host='localhost', port='5432')
    try:
        data1 = pd.read_sql(sql="select topic_url from posts_replies where floor=1", con=db_conn)
        data2 = pd.read_sql(sql="select topic_url from topic_rate_get", con=db_conn)
        # only topics that are not yet in topic_rate_get
        topic_urls = set(data1['topic_url']) - set(data2['topic_url'])
        for topic_url in topic_urls:
            res = pd.read_sql(sql='select topic_type, board_name from posts_replies where floor=1 and topic_url=%s',
                              con=db_conn, params=(topic_url,))
            topic_type, board_name = res['topic_type'].values[0], res['board_name'].values[0]
            try:
                # execute_with_commit: plain execute() never commits, so the
                # inserted rows were silently discarded
                postgres.execute_with_commit(
                    sql="INSERT INTO topic_rate_get(topic_url, topic_type, board_name, rate_get) VALUES(%s, %s, %s, 0)",
                    params=(topic_url, topic_type, board_name))
                print('插入成功')
            except Exception as e:
                print('插入失败', e)
    finally:
        # release the connection even if a read fails mid-way
        postgres.connect_close()


def delete_empty_topic_url():
    """Delete replies whose parent topic (floor=1 row) no longer exists."""
    postgres = DBPoolHelper(db_type='postgressql', dbname='dingxiangyuan', user='postgres', password='0000',
                            host='localhost', port='5432')
    try:
        data1 = pd.read_sql('select topic_url from posts_replies where floor=1', con=db_conn)
        data2 = pd.read_sql('select distinct topic_url from posts_replies', con=db_conn)
        # URLs that have replies but no first-floor (topic) row
        topic_urls = set(data2['topic_url']) - set(data1['topic_url'])
        print(len(topic_urls))
        for index, url in enumerate(topic_urls):
            try:
                # execute_with_commit: plain execute() never commits, so the
                # deletions were silently rolled back
                postgres.execute_with_commit('delete from posts_replies where topic_url=%s', params=(url,))
                print(f'删除成功\t{index}')
            except Exception as e:
                print(f'删除失败\t{index}\t{e}')
    finally:
        # release the connection even if a read fails mid-way
        postgres.connect_close()


def delete_user_invalid_posts():
    """Delete posts whose author has no record in dingxiangke
    (i.e. the author's profile page no longer exists)."""
    postgres = DBPoolHelper(db_type='postgressql', dbname='dingxiangyuan', user='postgres', password='0000',
                            host='localhost', port='5432')
    try:
        data1 = pd.read_sql(sql='select distinct author_url from posts_replies', con=db_conn)
        data2 = pd.read_sql(sql='select distinct user_url_unquote from dingxiangke', con=db_conn)
        # authors that posted but have no dingxiangke profile row
        author_urls = set(data1['author_url']) - set(data2['user_url_unquote'])
        print(len(author_urls))
        for index, user_url in enumerate(author_urls):
            try:
                # execute_with_commit: plain execute() never commits, so the
                # deletions were silently rolled back
                postgres.execute_with_commit(sql='delete from posts_replies where author_url=%s', params=(user_url,))
                print(f'{index}\t删除成功\t{user_url}')
            except Exception as e:
                print(f'{index}\t删除失败\t{e}')
    finally:
        # release the connection even if a read fails mid-way
        postgres.connect_close()


def get_board_topic_top_dingxiangke():
    """Export the 1000 most-followed users to dingxiangke.csv.

    Candidate URLs come from user_follower.xlsx (skipping the first two
    rows), matched against the dingxiangke table and sorted by follower
    count, descending.
    """
    follower_sheet = pd.read_excel("user_follower.xlsx")
    candidate_urls = follower_sheet[2:]['user_url_unquote']
    all_users = pd.read_sql("select * from dingxiangke", con=db_conn)
    matched = all_users[all_users.user_url_unquote.isin(candidate_urls)].copy()
    matched.sort_values(by='follower', ascending=False, inplace=True)
    matched[:1000].to_csv("dingxiangke.csv", encoding='utf_8_sig', index=False)


def update_user_id(board_name="心血管"):
    """Scrape the numeric user_id from each top author's profile page
    and write the result to user_follower.xlsx.

    Takes the distinct first-floor authors of ``board_name``, keeps the
    1000 with the most followers, fetches each profile page and extracts
    the userId embedded in its gdata javascript blob.

    :param board_name: forum board to select authors from
    """
    data1 = pd.read_sql(sql="select distinct author_url from posts_replies where board_name=%s and floor=1",
                        con=db_conn, params=(board_name,))
    author_urls = data1['author_url']
    data2 = pd.read_sql(sql="select * from dingxiangke", con=db_conn)
    user_df = data2.loc[
        data2.user_url_unquote.isin(author_urls), ['user_url_unquote', 'user_name', 'following', 'follower']]
    user_df.sort_values(by='follower', ascending=False, inplace=True)
    user_df = user_df[:1000]
    # raw string (was un-escaped '\d'), compiled once outside the loop
    user_id_pattern = re.compile(r'gdata = {"userId": "(\d+)"')
    for url in user_df['user_url_unquote']:
        response = requests.get(url)
        match = user_id_pattern.search(response.text)
        if match is None:
            # re.search(...).group(1) used to raise AttributeError on a
            # page without the gdata blob; skip such pages instead
            print(f'user_id not found for {url}')
            continue
        user_df.loc[user_df.user_url_unquote == url, 'user_id'] = match.group(1)
    user_df.to_excel("user_follower.xlsx", index=False)


def get_board_topic_author_url():
    """Push 'url;name;following;follower;user_id' strings from
    user_follower.xlsx (skipping the first two rows) into the redis set
    'user_follower_infos'."""
    sheet = pd.read_excel("user_follower.xlsx")
    for row in sheet[2:].itertuples(index=False):
        info = f'{row.user_url_unquote};{row.user_name};{row.following};{row.follower};{row.user_id}'
        redis_conn.sadd('user_follower_infos', info)


def get_user_follower_dict():
    """Return a {user_id: follower-row count} dict from the
    user_follower table."""
    counts = pd.read_sql(sql="select user_id, count(*) as num from user_follower GROUP BY user_id;", con=db_conn)
    # direct column zip replaces the apply()-with-side-effects pattern
    return dict(zip(counts['user_id'], counts['num']))


@timeit
def get_user_topic_post_start_urls():
    """Fetch each user's total topic-list and reply-list page counts from
    the dxy bbs API and save them to board_top_user.xlsx as the columns
    ``topic_pages`` and ``post_pages``."""
    JUTE_TOKEN = '9c271aa3-ffbf-4ea3-b021-13b57697be51'
    user_df = pd.read_excel('user_follower.xlsx')
    num = 0
    # both endpoints use identical session headers — hoisted out of the two
    # former copy-pasted inner functions
    headers = {
        'Cookie': 'DXY_USER_GROUP=42; __auc=d205b571172799412796bf70504; _ga=GA1.2.642389535.1591177647; CLASS_CASTGC=3a167ea5a9e2314851400092a956e733e0d80ef565ff659b3984aa499ed34af7535bc7d25e1766ba16799e742c8ed0dbd24f6a8f72a32bcbd74d9d0791f1a5ff45f32e599b521d41047fa3041ec868db4fc021fe0e2d3c7f579a8c8c85de0419ef4292dff1c067138053129c49e48b99d7b7408903e6b8b7f2e7960ae22b38aadde6db6bc900526524f0897e206d6403e041e3d59d3cd08ae376c34e363d86a4999793999ba69ffe2f1bce9f855352e0f21e5c89838b9169721f0ab3d14f42a4963a507e306d5dd3d93be83908b761ff877ce0ba9d05e8bf5a7e4ce6bc169be1fff492edb712e9629ece8af98184f972283f20446e57d9f7804c32d2194f9024; JUTE_BBS_DATA=8867be01e7314365695377e3399cf32853329d05a7b58dab9fad869d790feed899be1912f006c9c4cefe3222d89b35aa8f58e0bcbd9b849b49c6e69ea1950cb580671d366be2287a014e1c965d02f595; home_hash=%23money; route_bbs=4b7df70fe96acfe89c32b68456fd2ed3; JUTE_SESSION_ID=760526e7-b824-4577-9a83-96190c609e6f; JUTE_TOKEN=a543cf85-ab31-4f31-8da4-eb98b36d9f76; route=6db93de1de9a21f214bd53b6a2c354d3; JSESSIONID=73DB57252CD9A853541EABFC7C984A3B; __utma=17875052.642389535.1591177647.1591675467.1591675467.1; __utmc=17875052; __utmz=17875052.1591675467.1.1.utmcsr=baidu|utmccn=(organic)|utmcmd=organic; __utmt=1; __utmb=17875052.1.10.1591675467; Hm_lvt_8a6dad3652ee53a288a11ca184581908=1591610496,1591615121,1591663078,1591675467; Hm_lpvt_8a6dad3652ee53a288a11ca184581908=1591675467; CMSSESSIONID=0874B9953D34D913C2E1D5F5232887CD-n1; __asc=1babefa517297403e6eeda32ab6; Hm_lvt_253e434fd63b62a2659ddd3e7412f769=1591615126,1591622542,1591663082,1591675470; __utma=251724881.642389535.1591177647.1591666045.1591675470.16; __utmc=251724881; __utmz=251724881.1591675470.16.6.utmcsr=dxy.cn|utmccn=(referral)|utmcmd=referral|utmcct=/; Hm_lpvt_253e434fd63b62a2659ddd3e7412f769=1591675493; __utmb=251724881.4.10.1591675470; JUTE_SESSION=c94a35c74c51c0203aed2168471de2dfee4dcb429f74c150e7a514a260d961119b808e875dd3eed51c6bc02200f367e2d448a85169df3ca180453d6883a2442803abd5a51ce8cb03',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36',
    }

    def fetch_total_pages(user_id, service):
        """Query one bbsapi list endpoint for ``user_id`` and return
        pageBean.total; ``service`` is the ``s=`` query value
        ('user_topic' or 'user_reply_json')."""
        nonlocal num
        num += 1  # progress counter printed per request
        url = f'http://i.dxy.cn/bbs/bbsapi/forsns?JUTE_TOKEN={JUTE_TOKEN}&s={service}&t=list&uid={user_id}&page=1&size=15'
        response = requests.get(url=url, headers=headers)
        data = response.json()
        print(num)
        return data['pageBean']['total']

    # Series.apply forwards extra keyword arguments to the function
    user_df['topic_pages'] = user_df.user_id.apply(fetch_total_pages, service='user_topic')
    user_df['post_pages'] = user_df.user_id.apply(fetch_total_pages, service='user_reply_json')
    user_df.to_excel('board_top_user.xlsx', index=False)
    print(user_df.info())


@timeit
def add_redis_user_topic_post_start_urls():
    """Push 'user_id;user_name;topic_pages;post_pages' strings from
    board_top_user.xlsx (skipping the first two rows) into the redis set
    'user_top_user_start_infos', then print total page counts."""
    sheet = pd.read_excel('board_top_user.xlsx')
    selected = sheet[2:]
    for row in selected.itertuples(index=False):
        redis_conn.sadd('user_top_user_start_infos',
                        f'{row.user_id};{row.user_name};{row.topic_pages};{row.post_pages}')
    print(selected.topic_pages.sum())
    print(selected.post_pages.sum())


def insert_redis_dingxiangke_urls():
    """Seed the redis set 'dingxiangke_start_urls' with author URLs not
    yet crawled into dingxiangke plus every board moderator URL."""
    authors = pd.read_sql(sql='select distinct author_url from posts_replies', con=db_conn)
    crawled = pd.read_sql(sql='select distinct user_url_unquote from dingxiangke', con=db_conn)
    pending_urls = set(authors['author_url']) - set(crawled['user_url_unquote'])
    boards = pd.read_sql(sql="select distinct moderator_url_list from board", con=db_conn)
    # moderator_url_list is a '; '-separated string per board row
    moderator_urls = []
    for url_list in boards['moderator_url_list'].str.split('; '):
        moderator_urls.extend(url_list)
    for url in pending_urls:
        redis_conn.sadd('dingxiangke_start_urls', url)
    for url in moderator_urls:
        redis_conn.sadd('dingxiangke_start_urls', url)


if __name__ == '__main__':
    # Compute page urls and insert them into redis
    # insert_redis_topic_page_urls()
    # insert_redis_dingxiangke_urls()
    # Populate the topic rate table
    # insert_into_topic_rate()
    # Delete replies whose topic post is missing
    # delete_empty_topic_url()
    # Delete posts belonging to invalid users
    # delete_user_invalid_posts()
    # Update user ids
    # update_user_id()
    # Get poster urls for the specified board
    get_board_topic_top_dingxiangke()
    # get_board_topic_author_url()
    # get_user_follower_dict()
    # get_user_topic_post_start_urls()
    # add_redis_user_topic_post_start_urls()
