import pymysql
import pandas as pd
import json
import numpy as np 
from word_clound_day import main_wcd
from word_clound import main_wc
import os 
# 连接数据库 — shared module-level connection used by the "top 10" queries
# below; closed at the bottom of the module once those import-time queries
# have run. NOTE(review): credentials are hard-coded here and repeated in
# several functions — consider moving them to a single config.
db = pymysql.connect(host='localhost', user = "root", passwd="123456", db="mysql", port=3306, charset="utf8")

# 分页工具 (pagination helper)
def getPage(pageSize, pageCurrent):
    """Compute the MySQL ``LIMIT start, count`` values for one page.

    Args:
        pageSize: number of rows per page.
        pageCurrent: zero-based page index; negative values clamp to page 0.

    Returns:
        dict with 'limitStart' (row offset) and 'limitEnd' (row count).
    """
    # Bug fix: the offset of page N is N * pageSize. The previous formula,
    # pageCurrent * (pageSize - 1), under-advanced the offset by one row
    # per page, so consecutive pages overlapped.
    limitStart = 0 if pageCurrent < 0 else pageCurrent * pageSize
    return {"limitStart": limitStart, "limitEnd": pageSize}

# 通过sql语句取出需要的数据
# 查询取出用户列表的所有信息 — fetch one page of user_star rows.
def get_users(param):
    """Return one page of rows from the user_star table.

    Args:
        param: dict with 'pageSize' and 'pageCurrent'.

    Returns:
        dict with 'data' (list of row tuples), 'pageSize', 'pageCurrent'
        and 'totleCount' (total rows in user_star).
    """
    pageSize = param['pageSize']
    pageCurrent = param['pageCurrent']
    page = getPage(pageSize, pageCurrent)
    mdb = pymysql.connect(host='localhost', user="root", passwd="123456",
                          db="mysql", port=3306, charset="utf8")
    try:
        cursor_user = mdb.cursor()
        try:
            cursor_user.execute("SELECT count(*) FROM user_star")
            totleCount = cursor_user.fetchall()[0][0]
            # Parameterized LIMIT instead of string concatenation — avoids
            # SQL injection if the paging values ever come from user input.
            cursor_user.execute(
                "SELECT * FROM user_star limit %s, %s",
                (int(page['limitStart']), int(page['limitEnd'])))
            user_data = cursor_user.fetchall()
        finally:
            cursor_user.close()
    finally:
        # Close the connection even if a query fails (the original leaked
        # it on any exception).
        mdb.close()
    return {
        'data': list(user_data),
        'pageSize': pageSize,
        'pageCurrent': pageCurrent,
        'totleCount': totleCount,
    }

# 用户账号列表
def user_list(data):
    """Build the paginated user-account listing.

    Args:
        data: dict with 'pageSize' and 'pageCurrent' (passed to get_users).

    Returns:
        dict with 'pageSize', 'pageCurrent', 'totleCount' and 'data',
        a list of {'id', 'name', 'fans', 'attention', 'weibo'} dicts.
    """
    result = get_users(data)
    user_df = pd.DataFrame(result['data'])
    json_user = to_json2(user_df)
    # Index mapping assumes user_star column order id, name, fans,
    # attention, weibo — TODO confirm against the table schema.
    # (Comprehension replaces the append loop; the original also shadowed
    # the function's own name with the local list.)
    rows = [
        {'id': r[0], 'name': r[1], 'fans': r[2],
         'attention': r[3], 'weibo': r[4]}
        for r in json_user['data']
    ]
    return {
        'pageSize': result['pageSize'],
        'pageCurrent': result['pageCurrent'],
        'totleCount': result['totleCount'],
        'data': rows,
    }

# 查询取出微博内容列表的所有信息 — fetch one page of tb_weibos rows.
def get_weibos(param):
    """Return one page of rows from the tb_weibos table.

    Args:
        param: dict with 'pageSize' and 'pageCurrent'.

    Returns:
        dict with 'data' (list of row tuples), 'pageSize', 'pageCurrent'
        and 'totleCount' (total rows in tb_weibos).
    """
    pageSize = param['pageSize']
    pageCurrent = param['pageCurrent']
    page = getPage(pageSize, pageCurrent)
    wdb = pymysql.connect(host='localhost', user="root", passwd="123456",
                          db="mysql", port=3306, charset="utf8")
    try:
        # One cursor for both statements — the original opened a second
        # cursor mid-function and leaked the first.
        cursor_weibo = wdb.cursor()
        try:
            cursor_weibo.execute("SELECT count(*) FROM tb_weibos")
            totleCount = cursor_weibo.fetchall()[0][0]
            # Parameterized LIMIT instead of string concatenation.
            cursor_weibo.execute(
                "SELECT * FROM tb_weibos limit %s, %s",
                (int(page['limitStart']), int(page['limitEnd'])))
            weibo_data = cursor_weibo.fetchall()
        finally:
            cursor_weibo.close()
    finally:
        # Close the connection even if a query fails.
        wdb.close()
    return {
        'data': list(weibo_data),
        'pageSize': pageSize,
        'pageCurrent': pageCurrent,
        'totleCount': totleCount,
    }

# 微博内容列表
def weibo_list(data):
    """Build the paginated weibo-content listing.

    Args:
        data: dict with 'pageSize' and 'pageCurrent' (passed to get_weibos).

    Returns:
        dict with 'pageSize', 'pageCurrent', 'totleCount' and 'data',
        a list of {'userid', 'time', 'context', 'dianzan', 'pinglun',
        'zhuanfa', 'url'} dicts.
    """
    result = get_weibos(data)
    weibo_df = pd.DataFrame(result['data'])
    json_weibo = to_json2(weibo_df)
    # Index mapping assumes tb_weibos column order url, time, context,
    # userid, zhuanfa, pinglun, dianzan — TODO confirm against the schema.
    # (Comprehension replaces the append loop; the original also shadowed
    # the function's own name with the local list.)
    rows = [
        {'userid': r[3], 'time': r[1], 'context': r[2], 'dianzan': r[6],
         'pinglun': r[5], 'zhuanfa': r[4], 'url': r[0]}
        for r in json_weibo['data']
    ]
    return {
        'pageSize': result['pageSize'],
        'pageCurrent': result['pageCurrent'],
        'totleCount': result['totleCount'],
        'data': rows,
    }


# 查询取出微博评论数前十的数据
cursor_reply = db.cursor()
#使用execute()方法执行sql语句
cursor_reply.execute("select userid,context,pinglun,url,time from tb_weibos ORDER BY -pinglun limit 0,10")
#使用fetchall()方法获得单条数据
weibo_reply = cursor_reply.fetchall()
reply_df = pd.DataFrame(list(weibo_reply))

# 评论数排行 (comment-count ranking)
def reply_list():
    """Return the pre-fetched top-10 most-commented weibos as dicts."""
    parsed = to_json2(reply_df)
    keys = ('userid', 'context', 'pinglun', 'url', 'time')
    return [dict(zip(keys, row)) for row in parsed['data']]

def set_config(param):
    """Persist the analysis time window and regenerate the word clouds.

    Truncates the ``message`` table (it only ever holds the current
    window), inserts ``param['time1']``/``param['time2']``, then re-runs
    the two word-cloud scripts.

    Args:
        param: dict with 'time1' and 'time2' values.
    """
    config = param
    sdb = pymysql.connect(host='localhost', user="root", passwd="123456",
                          db="mysql", port=3306, charset="utf8")
    try:
        cursor = sdb.cursor()
        try:
            cursor.execute("truncate table message")
            sql = "insert IGNORE into message (time1,time2) values (%s,%s)"
            cursor.execute(sql, (config["time1"], config["time2"]))
            sdb.commit()
        finally:
            cursor.close()
    finally:
        # Guarantee the connection is released even if a statement fails
        # (the original leaked it on any exception).
        sdb.close()
    # NOTE(review): the module already imports main_wcd/main_wc from these
    # scripts; calling them directly would avoid spawning a shell. Kept as
    # subprocess-style invocation to preserve existing behavior.
    try:
        os.system("python word_clound_day.py")
        os.system("python word_clound.py")
    except OSError:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        print("Error")

# 查询取出微博转发数前十的数据
# Top 10 weibos by repost count (zhuanfa), fetched once at import time.
cursor_forward = db.cursor()
# ORDER BY -zhuanfa sorts descending (negate, then ascending sort).
cursor_forward.execute("select userid,context,zhuanfa,url,time from tb_weibos ORDER BY -zhuanfa limit 0,10")
# forward_df is read later by forward_list().
weibo_forward = cursor_forward.fetchall()
forward_df = pd.DataFrame(list(weibo_forward))

# 转发数排行 (repost-count ranking)
def forward_list():
    """Return the pre-fetched top-10 most-reposted weibos as dicts."""
    parsed = to_json2(forward_df)
    keys = ('userid', 'context', 'zhuanfa', 'url', 'time')
    return [dict(zip(keys, row)) for row in parsed['data']]

# 查询取出微博点赞数前十的数据
# Top 10 weibos by like count (dianzan), fetched once at import time.
cursor_attention = db.cursor()
# ORDER BY -dianzan sorts descending (negate, then ascending sort).
cursor_attention.execute("select userid,context,dianzan,url,time from tb_weibos ORDER BY -dianzan  limit 0,10;")
# attention_df is read later by attention_list().
weibo_attention = cursor_attention.fetchall()
attention_df = pd.DataFrame(list(weibo_attention))

# 关注数排行 — despite the name, the underlying query ranks by like
# count (dianzan), not follower count.
def attention_list():
    """Return the pre-fetched top-10 most-liked weibos as dicts."""
    parsed = to_json2(attention_df)
    keys = ('userid', 'context', 'dianzan', 'url', 'time')
    return [dict(zip(keys, row)) for row in parsed['data']]


# 查询取出微博活跃度前十的数据
# Top 10 accounts by post count (weibos), fetched once at import time.
cursor_active = db.cursor()
# ORDER BY -weibos sorts descending (negate, then ascending sort).
cursor_active.execute("select `name`,weibo,weibos,url from user_star ORDER BY -weibos limit 0,10")
# active_df is read later by active_list().
weibo_active = cursor_active.fetchall()
active_df = pd.DataFrame(list(weibo_active))
# print(active_df)

# 活跃度排行 (activity ranking)
def active_list():
    """Return the pre-fetched top-10 most-active accounts as dicts."""
    parsed = to_json2(active_df)
    return [
        {'name': row[0], 'weibo': row[1], 'weibos': row[2], 'url': row[3]}
        for row in parsed['data']
    ]


# 查询取出微博影响力前十的数据
# Influence ranking, built eagerly at import time: a user's total
# engagement (likes + comments + reposts) divided by their post count.
cursor1 = db.cursor()
sql = "select userid,(`dianzan`+`pinglun`+`zhuanfa`) as yingxiang FROM tb_weibos"
cursor1.execute(sql)
weibo1 = cursor1.fetchall()
active1 = pd.DataFrame(list(weibo1))
# Sum per-post engagement per user (column 0 = userid, column 1 = total).
a = active1.groupby(0).sum()
b = a.reset_index(drop=False)

cursor2 = db.cursor()
sql = "select userid,count(userid) from tb_weibos group by userid order by userid"
cursor2.execute(sql)
weibo2 = cursor2.fetchall()
c = pd.DataFrame(list(weibo2))
# Outer-merge engagement totals with post counts on userid (column 0).
d = pd.merge(b,c,how='outer',on=0)

# The colliding integer column 1 gets pandas' merge suffixes, producing
# string columns '1_x'/'1_y'; rename them to '1' (engagement) and '2'
# (post count).
e = d.rename(columns ={'1_x':'1','1_y':'2'})
# '3' = average engagement per post (the influence score).
e['3'] = (e['1'])/(e['2'])
h = e.sort_values(by=['3'],ascending=False)
# NOTE(review): the comment above says top ten, but head(11) keeps 11
# rows — confirm whether the extra row is intentional.
influence_df = h.head(11)

def influence1_list():
    """Return the pre-built influence ranking as userid/score dicts.

    Each row of influence_df carries [userid, total engagement,
    post count, precomputed ratio]; the score is recomputed here as
    engagement / count, exactly as the original did.
    """
    parsed = to_json2(influence_df)
    return [
        {'userid': row[0], 'yingxiang': row[1] / row[2]}
        for row in parsed['data']
    ]

def to_json1(df, orient='split'):
    """Serialize a DataFrame to a JSON string, keeping non-ASCII text.

    Args:
        df: the pandas DataFrame to serialize.
        orient: pandas to_json orientation (default 'split').

    Returns:
        The JSON document as a str.
    """
    options = {'orient': orient, 'force_ascii': False}
    return df.to_json(**options)


def to_json2(df, orient='split'):
    """Convert a DataFrame to plain Python objects via its JSON form.

    Args:
        df: the pandas DataFrame to convert.
        orient: pandas to_json orientation (default 'split').

    Returns:
        The parsed JSON document (dict/list of built-in types).
    """
    return json.loads(df.to_json(orient=orient, force_ascii=False))

# Entire user_stars table, fetched once at import time; active_new is
# read later by influence_list().
cursor_new = db.cursor()
sql_new = "select * from user_stars"
cursor_new.execute(sql_new)
weibo_new = cursor_new.fetchall()
active_new = pd.DataFrame(list(weibo_new))

def influence_list():
    """Return the pre-fetched user_stars rows as influence dicts.

    Index mapping assumes user_stars columns name, weibo, fans, ?, url —
    TODO confirm against the table schema.
    """
    parsed = to_json2(active_new)
    return [
        {'name': row[0], 'weibo': row[1], 'url': row[4], 'fans': row[2]}
        for row in parsed['data']
    ]
# 关闭数据库连接 — close the shared connection now that every
# import-time query has run; the list functions above only read the
# DataFrames that were already materialized.
db.close()