import base64
import json
import os
import shutil
import time
import random
from collections import OrderedDict
from urllib.parse import quote_plus

import redis
import requests
from sqlalchemy import create_engine
import pandas as pd
from DrissionPage import ChromiumPage
from DrissionPage import ChromiumOptions
from DrissionPage.common import Settings
from DrissionPage.common import Keys
from DrissionPage.common import Actions
import pymysql
from DrissionPage.errors import *
from xhs_detail_up import *
import datetime


import logging
logging.basicConfig(
    level=logging.DEBUG,  # log everything from DEBUG up
    format='%(asctime)s - %(levelname)s - %(message)s',  # timestamp - level - message
    handlers=[
        logging.FileHandler('xhs_empower_login_b.log'),  # persist logs to this file
        logging.StreamHandler()  # and echo them to the console as well
    ]
)

def xhs_config_file():
    """Read ``xhs_config.ini`` from the working directory and return its section names.

    Missing file is not an error: configparser silently reads nothing and an
    empty list is returned.
    """
    import configparser

    parser = configparser.ConfigParser()
    parser.read('xhs_config.ini')
    return parser.sections()

"""
https://customer.xiaohongshu.com/api/cas/customer/web/qr-code?service=https:%2F%2Fcreator.xiaohongshu.com&qr_code_id=68c517395516719772176435"""

class XHS_XPIDER:
    def __init__(self, user_id):
        """Launch a dedicated Chromium instance for one spider user and connect to Redis.

        :param user_id: spider account id; also names the per-user Chrome
            user-data directory and the Redis QR-code key.
        """
        self.CO = ChromiumOptions()  # chrome launch options
        self.user_id = str(user_id)  # spider user id

        self.PORT = self.port_input()  # chrome debugging port (stored port or a fresh random one)
        # Fix: the original passed the literal "browser_path='/Applications/..." as the
        # path (the keyword name was pasted into the string), so the browser path was invalid.
        self.CO.set_paths(browser_path='/Applications/Google Chrome.app/Contents/MacOS/Google Chrome')
        self.CO.set_local_port(self.PORT)
        self.CO.set_user_data_path(os.getcwd() + '/user_info_dir/{}'.format(self.user_id))  # per-user chrome profile
        # self.CO.arguments.append('--headless')
        self.ch = ChromiumPage(self.CO)

        self.ch.set.window.max()  # maximize the browser window
        self.ac = Actions(self.ch)
        print(self.user_id)
        # NOTE(review): credentials are hard-coded; consider moving them to config/env.
        self.redis_conn = redis.StrictRedis(host='r-bp162522qrwjh5e7jhpd.redis.rds.aliyuncs.com', port=6379,
                                            password='123sdmmh568%$#vs*!', db=22)

        self.xhs_config = xhs_config_file()


    """从数据库判断用户是否存在"""
    def user_state(self):
        """Return "用户存在"/"用户不存在" depending on whether self.user_id is registered in xhs_spider_config."""
        sql = "select spider_user_id,spider_port,spider_login_code from live_data.xhs_spider_config"
        known_ids = [row[0] for row in self.pymysql_fetchall(sql)]
        print(known_ids)
        if self.user_id not in known_ids:
            return "用户不存在"
        print("存在")
        return "用户存在"



    """查询数据库"""

    def pymysql_fetchall(self, sql):
        """Run *sql* against live_data and return all rows as a tuple of tuples.

        Fix: cursor and connection are now closed in ``finally`` so an
        exception raised by ``execute`` no longer leaks the connection.
        NOTE(review): credentials are hard-coded; consider moving them to config/env.
        """
        con = pymysql.connect(host='rm-bp17r99475xuauk63yo.mysql.rds.aliyuncs.com', port=3306, user='live_data',
                              passwd='CJMg2tsHg7sj6bo0UTKq0a4T!fPDpJ', db='live_data', charset='utf8')
        try:
            cursor = con.cursor()
            try:
                cursor.execute(sql)
                sql_result = cursor.fetchall()
                # commit kept for parity with the original (harmless for SELECTs)
                con.commit()
            finally:
                cursor.close()
        finally:
            con.close()

        return sql_result


    """自动随机生成端口 并进行检测"""
    def port_jiance(self, port1=None):
        """Check user existence and whether a candidate port is already assigned.

        :param port1: candidate chrome port to test; a random port in
            [5000, 9000] is drawn when omitted (backward-compatible default).
        :return: "用户存在" when self.user_id is registered, otherwise
            "端口存在" / "端口不存在" for the candidate port.
        """
        if port1 is None:
            port1 = random.randint(5000, 9000)

        sql = "select spider_user_id,spider_port,spider_login_code from live_data.xhs_spider_config"
        spider_config = self.pymysql_fetchall(sql)
        print(spider_config)
        # Fix: ports come back from the DB as strings while port1 is an int, so the
        # original membership test could never match; compare both as strings.
        port_list = [str(i[1]) for i in spider_config]
        user_exists = self.user_state()
        print(user_exists, '************')
        if user_exists != "用户不存在":
            print('当前用户存在')
            return '用户存在'
        if str(port1) in port_list:
            return "端口存在"
        return "端口不存在"

    """根据用户ID查出端口号"""
    def port_fe(self):
        """Look up the chrome port stored for self.user_id in xhs_spider_config."""
        # NOTE: user_id is interpolated directly; it comes from internal config, not user input
        sql = "select spider_user_id,spider_port,spider_login_code from live_data.xhs_spider_config where spider_user_id='{}'".format(self.user_id)
        rows = self.pymysql_fetchall(sql)
        # concatenate every matching port value (normally exactly one row)
        return ''.join(row[1] for row in rows)

    """自动输入端口号"""
    def port_input(self):
        """Resolve the chrome debugging port for this user.

        Existing users reuse their stored port; new users get a random port in
        [5000, 9000].

        Fixes: the original returned ``self.PORT`` when the drawn port was
        taken, but ``self.PORT`` is not assigned yet when ``__init__`` calls
        this method (AttributeError).  A taken port now simply triggers
        another round with a fresh random candidate.  The dead pre-loop
        ``port_jiance()`` call was removed.
        """
        while True:
            state = self.port_jiance()
            if state == "用户存在":
                print("当前用户存在")
                port_ = self.port_fe()
                print(port_)
                return port_
            if state == "端口不存在":
                port1 = random.randint(5000, 9000)
                print(port1)
                return port1
            # "端口存在": loop again and try a new random candidate



    def mysql_config(self):
        """Build and return a SQLAlchemy engine for the live_data database."""
        # NOTE: credentials are hard-coded, matching the rest of this module
        db_user = 'live_data'
        db_pass = quote_plus('CJMg2tsHg7sj6bo0UTKq0a4T!fPDpJ')  # escape special chars for the URL
        db_host = 'rm-bp17r99475xuauk63yo.mysql.rds.aliyuncs.com'
        db_name = 'live_data'
        return create_engine(f"mysql+pymysql://{db_user}:{db_pass}@{db_host}:3306/{db_name}?charset=utf8mb4")

    def pymysql_update_total(self, sql, data, c_time, user_id):
        """Execute a parameterized write with ``(data, c_time, user_id)`` bound to *sql*.

        Fix: cursor and connection are now closed in ``finally`` so an
        exception raised by ``execute`` no longer leaks the connection.
        """
        con = pymysql.connect(host='rm-bp17r99475xuauk63yo.mysql.rds.aliyuncs.com', port=3306, user='live_data',
                              passwd='CJMg2tsHg7sj6bo0UTKq0a4T!fPDpJ', db='live_data', charset='utf8mb4')
        try:
            cursor = con.cursor()
            try:
                cursor.execute(sql, (data, c_time, user_id))
                con.commit()
            finally:
                cursor.close()
        finally:
            con.close()

        return "用户{}值成功,当前状态值".format(self.user_id)
    def pymysql_update(self, sql, num):
        """Execute a parameterized write with ``(num, self.user_id)`` bound to *sql*.

        Fix: cursor and connection are now closed in ``finally`` so an
        exception raised by ``execute`` no longer leaks the connection.
        """
        con = pymysql.connect(host='rm-bp17r99475xuauk63yo.mysql.rds.aliyuncs.com', port=3306, user='live_data',
                              passwd='CJMg2tsHg7sj6bo0UTKq0a4T!fPDpJ', db='live_data', charset='utf8')
        try:
            cursor = con.cursor()
            try:
                cursor.execute(sql, (num, self.user_id))
                con.commit()
            finally:
                cursor.close()
        finally:
            con.close()

        return "用户{}状态修改成功,当前状态值为{}".format(self.user_id, num)

    """二维码获取并保存"""

    def qrcode_save(self):
        """Grab the login QR-code <img> from the page and cache its data-URI in Redis.

        The base64 payload is parsed out of the DrissionPage element *repr*
        string — fragile; NOTE(review): this depends on the exact repr format
        "<ChromiumElement img src=... class='css-1lhmg90'>" and will break on
        DrissionPage upgrades — confirm.
        """
        print("二维码获取中........")
        time.sleep(2)
        # the QR-code <img> element on the login page
        xhs_qrcode_text = self.ch.ele(
            'xpath://*[@id="page"]/div/div[2]/div[1]/div[2]/div/div/div/div/div/div[2]/img[1]')
        print(xhs_qrcode_text)
        # full data-URI (stored in Redis) vs. bare base64 payload (PNG bytes)
        xhs_qrcode_redis = str(xhs_qrcode_text).split("<ChromiumElement img src=")[1].split(" class='css-1lhmg90'>")[0].split('\n')[0]
        xhs_qrcode_save_png = str(xhs_qrcode_text).split("<ChromiumElement img src='data:image/png;base64,")[1].split("' class='css-1lhmg90'>")[0].split('\n')[0]

        img_base = xhs_qrcode_redis.encode("utf-8")
        img_base1 = xhs_qrcode_save_png.encode("utf-8")

        # decoded PNG bytes — currently unused; see the commented file write below
        image_bytes = base64.b64decode(img_base1)

        # cache the QR code for 5 minutes so another service can show it to the user
        self.redis_conn.set('xhs_qrcode:{}'.format(self.user_id), img_base)
        print("写入成功")
        self.redis_conn.expire('xhs_qrcode:{}'.format(self.user_id), 300)
        # with open(os.getcwd() + "/qrcode_img_dir/" + "{}.png".format(self.user_id), "wb") as f:
        #     f.write(image_bytes)



        return "用户{} 二维码获取成功".format(self.user_id)

    """获取小红书用户主页信息 user-id"""

    def listen_user_id(self,xhs_account):
        """Listen for the galaxy user/info packet and return the creator's userId.

        Also inserts a profile row into live_data.xhs_user_info when the userId
        is not already present.

        :param xhs_account: 小红书 account number stored alongside the profile row.
        """
        self.ch.listen.start('https://creator.xiaohongshu.com/api/galaxy/user/info')  # start listening for packets on this URL
        print("正在监听用户信息")
        self.ch.refresh()
        time.sleep(2)
        user_info_list = []
        for packet in self.ch.listen.steps(timeout=20):
            print("******************************用户信息获取中******************************")
            print(packet.url)

            user_info = packet.response.body
            user_info = user_info["data"]
            # tag the payload with our spider user id and the account number
            user_info.update({"spider_user_id": self.user_id,"xhs_account":xhs_account})
            user_info_list.append(user_info)

            if "/user/info" in packet.url:
                break

        # userIDs already stored, to avoid duplicate inserts
        sql = "select userID from live_data.xhs_user_info"
        user_info_data = self.pymysql_fetchall(sql)
        xhs_user_info_list = []
        for i in user_info_data:
            xhs_user_info_list.append(i[0])
        print(xhs_user_info_list)
        # NOTE(review): IndexError here if no packet arrived within the timeout — confirm handling upstream
        refer_userID = user_info_list[0]["userId"]

        # if user_info_data == ():
        #     user_info_frame = pd.DataFrame(user_info_list)
        #
        #     user_info_frame = user_info_frame.astype("str")
        #     pd.io.sql.to_sql(user_info_frame, 'xhs_user_info', self.mysql_config(), schema='live_data',
        #                      if_exists='append',
        #                      index=False)
        if refer_userID in xhs_user_info_list:
            print("USERID 已存在")
        else:

            user_info_frame = pd.DataFrame(user_info_list)

            user_info_frame = user_info_frame.astype("str")
            user_info_frame["win"] = 'C'  # presumably a channel/source marker — TODO confirm meaning
            pd.io.sql.to_sql(user_info_frame, 'xhs_user_info', self.mysql_config(), schema='live_data',
                             if_exists='append',
                             index=False)
        return refer_userID
    # def xhs_user_info_requests(self):
    #     print("***开始获取用户主页***")
    #     """昵称"""
    #     nickname = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[1]/div[2]/h4')
    #     nickname = nickname.text
    #
    #     """关注"""
    #     follow = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[1]/div[2]/p[1]/span[1]/label')
    #     follow = follow.text
    #
    #     """粉丝"""
    #     fans = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[1]/div[2]/p[1]/span[2]/label')
    #     fans = fans.text
    #
    #     """获赞与收藏"""
    #     receive_likes_collect = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[1]/div[2]/p[1]/span[3]/label')
    #     receive_likes_collect = receive_likes_collect.text
    #
    #     """小红书账号"""
    #     xhs_account = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[1]/div[2]/p[1]/span[5]')
    #     xhs_account = xhs_account.text.split("小红书号：")[1]
    #
    #     """个人简介"""
    #     user_introduction = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[1]/div[2]/p[2]')
    #     user_introduction = user_introduction.text
    #
    #     """头像链接"""
    #     head_portrait_link = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[1]/div[1]/img')
    #     head_portrait_link = str(head_portrait_link).split("src='")[1].split("'")[0]
    #
    #     """获取主页数据总览 查看近30天内的数据 只能查看近三天内的数据"""
    #     print("***获取主页数据总览中***")
    #     time.sleep(1)
    #     self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[2]/div[1]/div[2]/div/div/div/button/span').click()
    #     """点击近30天的数据总览按钮"""
    #     self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[2]/div[1]/div[2]/div/div/div[2]/div/div[2]/div').click()
    #
    #     """获取近30天内数据总览数据"""
    #     added_fans = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[2]/div[2]/div[1]/span[2]')
    #     added_fans = added_fans.text
    #     homepage_visitor = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[2]/div[2]/div[2]/span[2]')
    #     homepage_visitor = homepage_visitor.text
    #     homepage_view = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[2]/div[2]/div[3]/span[2]')
    #     homepage_view = homepage_view.text
    #     interaction = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[2]/div[2]/div[4]/span[2]')
    #     interaction = interaction.text
    #
    #     """用户信息爬取时间"""
    #     current_time = datetime.datetime.now()
    #     current_time = str(current_time)[:-7]
    #
    #     user_info_json = {"spider_user_id": self.user_id, "spider_port": self.PORT, "nickname": nickname,
    #                       "follow": follow, "fans": fans, "receive_likes_collect": receive_likes_collect,
    #                       "xhs_account": xhs_account, "user_introduction": user_introduction,
    #                       "head_portrait_linkstr": head_portrait_link,
    #                       "added_fans": added_fans, "homepage_visitor": homepage_visitor,
    #                       "homepage_view": homepage_view, "interaction": interaction, "spider_time": current_time}
    #
    #     print(user_info_json)
    #     logging.info(f"Current User Info:{user_info_json}")
    #     xhs_user_info_frame = pd.DataFrame([user_info_json])
    #     print("***用户信息数据正在写入***")
    #     delete_user_sql = 'DELETE from live_data.xhs_user where spider_user_id=%s'
    #     print(self.delete_prod_user(delete_user_sql))
    #
    #     # pd.io.sql.to_sql(xhs_user_info_frame, 'xhs_user', self.mysql_config(), schema='live_data', if_exists='append',
    #     #                  index=False)
    #
    #     print("***用户信息数据写入完成***")
    #
    #     """爬虫user_id"""
    #     user_id  = self.listen_user_id(xhs_account)
    #     print(fans,"()()()()()")
    #     return fans,xhs_user_info_frame,xhs_account


    def xhs_user_info_listen(self):
        """Capture the creator personal_info packet and assemble the user DataFrame.

        Returns ``(fans_count, combined_frame, xhs_account, userID)``.
        NOTE(review): if no packet arrives within the 4s listen timeout,
        ``fans_list[0]`` / ``xhs_account_list[0]`` raise IndexError — confirm
        handling upstream.
        """
        self.ch.refresh()
        self.ch.listen.start(
            'https://creator.xiaohongshu.com/api/galaxy/creator/home/personal_info')  # listen for the profile packet

        user_info_xhs_json_list = []
        fans_list = []
        xhs_account_list = []
        for packet in self.ch.listen.steps(timeout=4):
            user_info_json = packet.response.body
            xhs_account = user_info_json["data"]["red_num"]  # 小红书 account number
            xhs_account_list.append(xhs_account)
            fans = user_info_json["data"]["fans_count"]
            fans_list.append(fans)
            follow = user_info_json["data"]["follow_count"]
            receive_likes_collect = user_info_json["data"]["faved_count"]
            head_portrait_linkstr = user_info_json["data"]["avatar"]
            user_introduction = user_info_json["data"]["personal_desc"]
            nickname= user_info_json["data"]["name"]
            xhs_new_json = {"xhs_account":xhs_account,"fans":fans,"follow":follow,"receive_likes_collect":receive_likes_collect,
                            "head_portrait_linkstr":head_portrait_linkstr,"user_introduction":user_introduction,'nickname':nickname}
            user_info_xhs_json_list.append(xhs_new_json)

        # self.ch.refresh()
        """三十天数据"""
        # The 30-day overview used to be captured from note_detail_new (kept
        # below for reference); its fields are currently zero-filled placeholders.
        # self.ch.listen.start(
        #     'https://creator.xiaohongshu.com/api/galaxy/creator/data/note_detail_new')

        # thirty_json_list = []
        # for packet_thirty in self.ch.listen.steps(timeout=3):
        #     thirty_json = packet_thirty.response.body
        #     added_fans = thirty_json["data"]["thirty"]["rise_fans_count"]
        #     homepage_view = thirty_json["data"]["thirty"]["view_time_avg"]
        #     homepage_visitor = thirty_json["data"]["thirty"]["view_count"]
        #     interaction = '0'
        current_time = datetime.datetime.now()
        current_time = str(current_time)[:-7]  # drop microseconds
        homepage_view = '0'
        homepage_visitor='0'
        added_fans = '0'
        interaction = '0'
        thirty_json_list = [{"spider_user_id": self.user_id, "spider_port": self.PORT, "added_fans": added_fans,
                       "homepage_view": homepage_view, "homepage_visitor": homepage_visitor,
                       "interaction": interaction, "spider_time": current_time}]
        # thirty_json_list.append(thirty_json)

        user_thirty_frame = pd.DataFrame(thirty_json_list)

        xhs_user_info_frame_ = pd.DataFrame(user_info_xhs_json_list)

        # profile columns + 30-day placeholder columns, side by side
        xhs_user_info_frame = pd.concat([xhs_user_info_frame_, user_thirty_frame], axis=1)
        xhs_user_info_frame = xhs_user_info_frame.astype("str")
        # remove any previous row for this spider user before re-inserting
        delete_user_sql = 'DELETE from live_data.xhs_user where spider_user_id=%s'
        print(self.delete_prod_user(delete_user_sql))
        userID = self.listen_user_id(xhs_account_list[0])
        # print(fans,"()()()()()")
        return fans_list[0], xhs_user_info_frame,xhs_account_list[0],userID


    """获取汇总笔记模块 切换近30天的数据"""

    def notes_switch_thirdy(self):
        """Open the notes-data page and verify the note_detail_new API responds.

        :return: "笔记页面数据点击完成" when a packet was captured, otherwise
            "没有监控到接口".

        Fix: the original loop incremented ``count_detail`` and then tested
        ``count_detail == 0``, which can never be true after the increment, so
        it always waited out the full listen timeout; break after the first
        captured packet instead (the same pattern notes_data_thirdy uses).
        """
        print("***开始点击笔记数据***")

        # click through to the notes-data page from the home page
        self.ch.ele('xpath://*[@id="content-area"]/main/div[1]/div/div[2]/div/div[4]/div/div').click()
        time.sleep(0.5)
        self.ch.refresh()
        self.ch.listen.start(
            'https://creator.xiaohongshu.com/api/galaxy/creator/data/note_detail_new')  # listen for the 30-day trend packet

        captured = []
        for packet in self.ch.listen.steps(timeout=3):
            captured.append(packet.response.body)
            break  # one packet is enough to know the API is alive
        if len(captured) == 0:

            return "没有监控到接口"

        return "笔记页面数据点击完成"

    """获取汇总笔记模块 获取近30天的汇总数据  并插入MYSQL数据库"""

    def notes_data_thirdy(self):
        """Collect the 30-day per-day note trend lists and return them as a DataFrame.

        Calls notes_switch_thirdy() first; when the API was not seen, returns a
        one-row frame with an empty template, otherwise a one-row frame whose
        "notes_data_thirty" column holds the list of trend dicts.
        NOTE(review): implicitly returns None when notes_switch_thirdy()
        yields any other status string — confirm callers handle that.
        """
        # (legacy xpath scraping of the summary numbers, kept for reference)
        # notes_data_list = []
        # for i in range(1, 8):
        #     """获取笔记模块的汇总数据"""
        #     total_data = self.ch.ele('xpath://*[@id="app"]/div/div[1]/div[2]/div[{}]/span[2]'.format(i))
        #     notes_data_list.append(total_data.text)
        # capture timestamp for this scrape (microseconds dropped)
        current_time = datetime.datetime.now()
        current_time = str(current_time)[:-7]
        # notes_json = {"spider_user_id": self.user_id, "spider_port": self.PORT, "view_sum_thirdy": notes_data_list[0],
        #               "total_view_time_thirdy": notes_data_list[1],
        #               "like_sum_thirdy": notes_data_list[2],
        #               "collect_sum_thirdy": notes_data_list[3], "comment_sum_thirdy": notes_data_list[4],
        #               "bullet_chat_sum_thirdy": notes_data_list[5], "notes_gained_follow_thirdy": notes_data_list[6],
        #               "spider_time": current_time}
        #
        # print("***笔记模块数据抓取成功***")
        #
        # """将用户id和端口 和汇总笔记插入数据库"""
        #
        # notes_total_data = pd.DataFrame([notes_json])
        #
        # pd.io.sql.to_sql(notes_total_data, 'xhs_notes_user', self.mysql_config(), schema='live_data',
        #                  if_exists='append', index=False)
        # print("***数据库插入成功***")
        cc = self.notes_switch_thirdy()
        print(cc)
        if cc == "没有监控到接口":
            # no packet seen: emit an empty template row so downstream still gets a frame
            data_result_list = {"spider_user_id": self.user_id, "spider_port": self.PORT, "view_list": '',
                                "view_time_list": '', "like_list": '', "collect_list": '',
                                "comment_list": '', "danmaku_list": '',
                                "home_view_list": '', "rise_fans_list": '',
                                "share_list": '', "spider_time": current_time}
            logging.info(f"Current User Thirty Notes Data:{data_result_list}")
            data_none = pd.DataFrame([{"notes_data_thirty": [data_result_list]}])
            data_none = data_none.astype("str")
            print(data_none)
            # pd.io.sql.to_sql(data_none, 'xhs_notes_thirty', self.mysql_config(), schema='live_data',
            #                  if_exists='append', index=False)
            return data_none
        print(cc,"&&&&&&&&&&&&&&&&&&&&&&&&&&&")
        if cc == "笔记页面数据点击完成":
            self.ch.refresh()
            self.ch.listen.start('https://creator.xiaohongshu.com/api/galaxy/creator/data/note_detail_new')  # listen again for the daily-trend packet

            """监听数据 笔记数据近30天的每天趋势图"""
            count_detail = 0
            detail_list_data = []
            for packet in self.ch.listen.steps():
                detail_list_data.append(packet.response.body["data"]["thirty"])

                count_detail = count_detail+1
                if count_detail == 1:  # stop after the first packet
                    break
                    # detail_list_data.append(packet.response.body)
            notes_thirty_list =[]
            for i in detail_list_data:
                # per-day trend arrays over the last 30 days
                view_list = i["view_list"]
                view_time_list = i["view_time_list"]
                like_list = i["like_list"]
                collect_list = i["collect_list"]
                comment_list = i["comment_list"]
                danmaku_list = i["danmaku_list"]
                home_view_list = i["home_view_list"]
                rise_fans_list = i["rise_fans_list"]
                share_list = i["share_list"]

                data_result_list = {"spider_user_id": self.user_id, "spider_port": self.PORT,"view_list":view_list,"view_time_list":view_time_list,"like_list":like_list,"collect_list":collect_list,"comment_list":comment_list,"danmaku_list":danmaku_list,
                 "home_view_list":home_view_list,"rise_fans_list":rise_fans_list,"share_list":share_list,"spider_time": current_time}
                # print(data_result_list)
                notes_thirty_list.append(data_result_list)

            data = pd.DataFrame([{"notes_data_thirty": notes_thirty_list}])
            # data["spider"]
            data = data.astype("str")
            print(data)
            # pd.io.sql.to_sql(data, 'xhs_notes_thirty', self.mysql_config(), schema='live_data',
            #                  if_exists='append', index=False)
            return data


    """将页码设置成48条每页"""

    def page_config(self):
        """Open the pager size selector and click it to switch to 48 items per page."""
        selector_base = 'xpath://*[@id="app"]/div/div[2]/div[4]/div[1]/div/div/div/div'
        self.ch.ele(selector_base + '/input').click()
        time.sleep(1)
        self.ch.ele(selector_base).click()

    """获取图文模块总共有多少个笔记 并有多少页码"""

    def Pagination_Page(self):
        """Parse the pager footer and return (total note count, total page count)."""
        pager_text = self.ch.ele('xpath://*[@id="app"]/div/div[2]/div[4]/div[1]').text
        # footer reads like "共 N 条，M 页"
        total_notes = pager_text.split("共 ")[1].split(" 条")[0]
        total_pages = pager_text.split('，')[1].split(' 页')[0]
        return int(total_notes), int(total_pages)

    """获取图文笔记的详情信息"""

    def notes_details_requests(self):
        """Scrape every image-text note card and return a list of note dicts.

        Pages through the 48-per-page list, reading title/时间/views/likes/etc.
        from each card via xpath.
        NOTE(review): the inner loop runs ``range(1, notes_num + 1)`` where
        notes_num is the TOTAL note count, not the per-page count — confirm
        this doesn't over-index on multi-page accounts.
        """
        self.page_config()
        time.sleep(1)
        """切换到图文模块"""
        # time.sleep(2)
        # self.ch.ele('xpath://*[@id="app"]/div/div[2]/div[2]/div[1]/div/button[3]/span').click()
        time.sleep(1)

        """调用上边方法 Pagination_Page"""
        notes_num, page_num = self.Pagination_Page()

        """抓取笔记图文数据"""
        """用户信息爬取时间"""
        # capture timestamp for this scrape (microseconds dropped)
        current_time = datetime.datetime.now()
        current_time = str(current_time)[:-7]
        NOTES_TUWEN_LIST = []
        for i in range(1, page_num + 1):
            # jump to page i via the pager input, then clear it
            self.ch.ele("x://input[@class='dyn css-xf7229 css-1hsmx34']").input("{}\n".format(i))
            self.ch.ele("x://input[@class='dyn css-xf7229 css-1hsmx34']").clear(by_js=True)
            for j in range(1, notes_num + 1):
                notes_title = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[1]/div[2]/span[1]'.format(j))
                notes_release_time = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[1]/div[2]/span[2]'.format(j))
                notes_release_time = notes_release_time.text.split('发布于 ')[1]
                notes_view = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[1]/li[1]/b'.format(j))
                notes_comment_count = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[2]/li[1]/b'.format(j))
                notes_like_count = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[1]/li[2]/b'.format(j))
                notes_share_count = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[2]/li[2]/b'.format(j))
                notes_collect_count = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[1]/li[3]/b'.format(j))
                notes_fans_hi = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[2]/li[3]/b'.format(j))
                notes_cover_link = self.ch.ele('xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[1]/div[1]'.format(j))
                # cover URL is parsed out of the element's inline style repr
                notes_cover_link = str(notes_cover_link).split('image: url("')[1].split('")')[0]
                # NOTE(review): "notes_collect_count" appears twice in this literal
                # (duplicate key; the second assignment wins with the same value)
                notes_tuwen_json = {"spider_user_id": self.user_id, "notes_title": notes_title.text,
                                    "notes_release_time": notes_release_time, "notes_view": notes_view.text,
                                    "notes_comment_count": notes_comment_count.text,
                                    "notes_like_count": notes_like_count.text,
                                    "notes_collect_count": notes_collect_count.text,
                                    "notes_share_count": notes_share_count.text,
                                    "notes_collect_count": notes_collect_count.text,
                                    "notes_fans_hi": notes_fans_hi.text, "notes_cover_link": notes_cover_link,
                                    "spider_time": current_time, "notes_type": "tuwen"}
                print(notes_tuwen_json)
                NOTES_TUWEN_LIST.append(notes_tuwen_json)
        return NOTES_TUWEN_LIST

    """获取图文模块总共有多少个笔记 并有多少页码"""

    def video_page(self):
        """Return (total note count, total page count) from the pager footer.

        Fix: this was a byte-for-byte duplicate of ``Pagination_Page``;
        delegate to it instead of maintaining two copies.
        """
        return self.Pagination_Page()

    """抓取视频数据"""

    def notes_video_page_requests(self):
        """Switch to the video-notes tab, set 48 per page, and return (video note count, page count)."""
        print("***视频笔记总页码获取中***")
        # click the video-notes tab
        self.ch.ele('xpath://*[@id="app"]/div/div[2]/div[2]/div[1]/div/button[2]/span').click()
        """获取页码48条"""
        self.ch.ele('xpath://*[@id="app"]/div/div[2]/div[4]/div[1]/div/div/div/div/input').click()
        time.sleep(1)
        """点击视频笔记模块的48条每页的按钮 切换到每页48条"""
        # DrissionPage text locator: click the "48条/页" option
        self.ch.ele('48条/页').click()

        """获取视频笔记模块中的页码和总条数"""
        # footer reads like "共 N 条，M 页"
        page_num_total = self.ch.ele('xpath://*[@id="app"]/div/div[2]/div[4]/div[1]')
        video_notes_num = page_num_total.text.split("共 ")[1].split(" 条")[0]

        video_page_num = page_num_total.text.split('，')[1].split(' 页')[0]
        print(video_notes_num, video_page_num)

        return int(video_notes_num), int(video_page_num)

    """获取视频笔记的详情信息"""

    def video_notes_detail_info(self):
        """Scrape every video note card and return a list of note dicts.

        NOTE(review): like notes_details_requests, the inner loop runs over the
        TOTAL note count rather than the per-page count — confirm on multi-page
        accounts.
        """
        print("***视频笔记详情数据抓取中***")
        video_notes_num, video_page_num = self.notes_video_page_requests()
        # capture timestamp for this scrape (microseconds dropped)
        current_time = datetime.datetime.now()
        current_time = str(current_time)[:-7]
        NOTES_VIDEO_LIST = []

        for i in range(1, video_page_num + 1):
            # jump to page i via the pager input, then clear it
            self.ch.ele("x://input[@class='dyn css-xf7229 css-1hsmx34']").input("{}\n".format(i))
            self.ch.ele("x://input[@class='dyn css-xf7229 css-1hsmx34']").clear(by_js=True)
            for j in range(1, video_notes_num + 1):
                notes_release_time = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[1]/div[2]/span[2]'.format(j))
                notes_title = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[1]/div[2]/span[1]'.format(j))
                notes_view = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[1]/li[1]/b'.format(j))
                notes_comment_count = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[2]/li[1]/b'.format(j))
                notes_viewing_time = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[1]/li[2]/b'.format(j))
                notes_bullet_chat = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[2]/li[2]/b'.format(j))
                notes_like_count = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[1]/li[3]/b'.format(j))
                notes_share_count = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[2]/li[3]/b'.format(j))
                notes_collect_count = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[1]/li[4]/b'.format(j))
                notes_fans_hi = self.ch.ele(
                    'xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[2]/ul[2]/li[4]/b'.format(j))
                notes_cover_link = self.ch.ele('xpath://*[@id="app"]/div/div[2]/div[3]/div[{}]/div[1]/div[1]'.format(j))
                # cover URL is parsed out of the element's inline style repr
                notes_cover_link = str(notes_cover_link).split('image: url("')[1].split('")')[0]
                notes_video_json = {"spider_user_id": self.user_id, "notes_title": notes_title.text,
                                    "notes_release_time": notes_release_time.text, "notes_view": notes_view.text,
                                    "notes_comment_count": notes_comment_count.text,
                                    "notes_like_count": notes_like_count.text,
                                    "notes_share_count": notes_share_count.text,
                                    "notes_collect_count": notes_collect_count.text,
                                    "notes_fans_hi": notes_fans_hi.text, "notes_viewing_time": notes_viewing_time.text,
                                    "notes_bullet_chat": notes_bullet_chat.text, "notes_cover_link": notes_cover_link,
                                    "spider_time": current_time, "notes_type": "video"}

                print(notes_video_json)
                NOTES_VIDEO_LIST.append(notes_video_json)
        return NOTES_VIDEO_LIST

    """图文数据和视频数据汇总 并入库"""

    def video_tuwen_total(self):
        """Scrape image-text and video notes, merge them, and append to xhs_notes_detail."""
        print("***开始汇总合并数据***")
        # scrape both note types (image-text first, then video)
        tuwen_rows = self.notes_details_requests()
        video_rows = self.video_notes_detail_info()

        # video rows first, then image-text rows, everything stringified
        merged_frame = pd.concat([pd.DataFrame(video_rows), pd.DataFrame(tuwen_rows)], axis=0)
        merged_frame = merged_frame.astype("str")
        print(merged_frame)
        print("***笔记详情数据正在入库......")
        pd.io.sql.to_sql(merged_frame, 'xhs_notes_detail', self.mysql_config(), schema='live_data', if_exists='append',
                         index=False)
        print("***笔记详情数据入库成功***")
        return '***图文笔记和视频笔记全部抓取成功***'

    """删除列"""

    def del_colu(self, frame):
        """Strip chart-styling keys from every dict in *frame*, in place, and return it.

        Raises KeyError if any record lacks one of the keys (same as before).
        """
        unwanted = ("color", "set_color", "set_title", "set_value")
        for col in unwanted:
            for record in frame:
                del record[col]
        return frame

    """监听模块"""
    """粉丝数据模块 使用监听方法抓取，监听粉丝模块里边的两个包"""

    # def fans_listen_data(self):
    #     self.ch.refresh()
    #     time.sleep(1)
    #     """点击粉丝数据模块按钮"""
    #     self.ch.ele('xpath://*[@id="content-area"]/main/div[1]/div/div[2]/div/div[5]').click()
    #
    #     self.ch.listen.start(targets=['https://creator.xiaohongshu.com/api/galaxy/creator/data/fans_portrait_new',
    #                                   "data/fans_source"])  # 开始监听，指定获取包含该文本的数据包
    #
    #     count = 0
    #     """获取当前时间，作为爬虫抓取数据的时间"""
    #     current_time = datetime.datetime.now()
    #     current_time = str(current_time)[:-7]
    #     result = {}
    #     """监听数据 监听两个数据包"""
    #     for packet in self.ch.listen.steps():
    #
    #         count += 1
    #         """监听到粉丝汇总图的接口"""
    #         if packet.url == "https://creator.xiaohongshu.com/api/galaxy/creator/data/fans_portrait_new":
    #             listen_result = packet.response.body
    #             gender = listen_result["data"]["gender"]
    #             age = listen_result["data"]["age"]
    #             city = listen_result["data"]["city"]
    #             interest = listen_result["data"]["interest"]
    #
    #             gender = self.del_colu(gender)
    #             age = self.del_colu(age)
    #             city = self.del_colu(city)
    #             interest = self.del_colu(interest)
    #             result_json = {"gender": gender, "age": age, "city": city, "interest": interest}
    #             result.update(result_json)
    #
    #         """监听到粉丝来源的接口"""
    #         if packet.url == "https://creator.xiaohongshu.com/api/galaxy/creator/data/fans_source":
    #             listen_result_source = packet.response.body
    #             fans_source = listen_result_source["data"]
    #             fans_source = self.del_colu(fans_source)
    #
    #             fans_source_result_json = {"fans_source": fans_source}
    #             result.update(fans_source_result_json)
    #         if count == 2:
    #             break
    #
    #     fans_page_result = pd.DataFrame([result])
    #     fans_page_result["spider_user_id"] = self.user_id
    #     fans_page_result["spider_time"] = current_time
    #     fans_page_result = fans_page_result.astype("str")
    #     print(fans_page_result)
    #
    #     pd.io.sql.to_sql(fans_page_result, 'xhs_fans_info', self.mysql_config(), schema='live_data',
    #                      if_exists='append', index=False)
    #
    #     """最后一个模块抓取完成，关闭浏览器"""
    #     self.ch.close()
    #     return "粉丝模块抓取完成"


    """粉丝模块监控"""
    def fans_listen_thirty(self,fans):
        """Capture the fans-analytics network packets for the last 30 days.

        fans: current follower count — an int, or a string that may contain
        "万" (x10,000) for large accounts.
        Behaviour split observed below: accounts under 50 followers only get
        the overall_new trend packet; accounts with >=50 followers also get
        the portrait and source packets (3 packets total).
        Returns a one-row DataFrame with the merged payload in "fans_thirty".
        """
        print(fans)

        # time.sleep(1)
        # self.ch.refresh()
        """点击粉丝数据模块按钮"""
        self.ch.ele('xpath://*[@id="content-area"]/main/div[1]/div/div[2]/div/div[5]').click()

        # self.ch.refresh()

        # time.sleep(3)
        # self.ch.listen.start('https://creator.xiaohongshu.com/api/galaxy/creator/data/fans/overall_new')  # 开始监听，指定获取包含该文本的数据包
        # Listen for the three analytics endpoints fired by the fans tab.
        self.ch.listen.start(targets=['https://creator.xiaohongshu.com/api/galaxy/creator/data/fans_portrait_new',
                                      "data/fans_source",'https://creator.xiaohongshu.com/api/galaxy/creator/data/fans/overall_new'])  # 开始监听，指定获取包含该文本的数据包


        """获取当前时间，作为爬虫抓取数据的时间"""
        # NOTE(review): current_time is computed but no longer used — the
        # column assignments below are commented out.
        current_time = datetime.datetime.now()
        current_time = str(current_time)[:-7]
        """监听数据 监听两个数据包"""
        fans_thirty_list = []
        result = {}
        count = 0
        print(fans, "粉丝数*********")
        logging.info(f"Current USER FNAS NUM:{fans}")
        for packet in self.ch.listen.steps(timeout=15):
            count += 1

            print(fans,"粉丝数*********")
            # "万" means >= 10,000 followers; collapse to sentinel 51 so the
            # >=50 branch below is taken.
            if '万' in str(fans):
                fans = int(51)

            else:
                fans = fans
            # Small accounts: only the overall_new packet ever arrives.
            if int(fans)<50 and packet.url == "https://creator.xiaohongshu.com/api/galaxy/creator/data/fans/overall_new":
                print("粉丝数小于50")


                # if packet.url == "https://creator.xiaohongshu.com/api/galaxy/creator/data/fans/overall_new":
                print("***************",count)
                print("获取到了")
                listen_result_thirty = packet.response.body
                rise_fans_list = listen_result_thirty["data"]["thirty"]["rise_fans_list"]
                leave_fans_list = listen_result_thirty["data"]["thirty"]["leave_fans_list"]
                fans_list = listen_result_thirty["data"]["thirty"]["fans_list"]
                listen_result_thirty_json = {"rise_fans_list": rise_fans_list, "leave_fans_list": leave_fans_list,
                                             "fans_list": fans_list}
                print(listen_result_thirty_json)
                result.update(listen_result_thirty_json)

                fans_thirty_list.append(listen_result_thirty_json)
                print("****",len(fans_thirty_list))

            # Small-account path is done after one overall_new packet.
            if len(fans_thirty_list) ==1:
                break
            # Large accounts: merge portrait + source + overall packets.
            if int(fans)>=50:

                if packet.url == "https://creator.xiaohongshu.com/api/galaxy/creator/data/fans_portrait_new":
                    listen_result = packet.response.body
                    gender = listen_result["data"]["gender"]
                    age = listen_result["data"]["age"]
                    city = listen_result["data"]["city"]
                    interest = listen_result["data"]["interest"]

                    # Strip chart-rendering keys before storage.
                    gender = self.del_colu(gender)
                    age = self.del_colu(age)
                    city = self.del_colu(city)
                    interest = self.del_colu(interest)
                    result_json = {"gender": gender, "age": age, "city": city, "interest": interest}
                    result.update(result_json)

                """监听到粉丝来源的接口"""
                if packet.url == "https://creator.xiaohongshu.com/api/galaxy/creator/data/fans_source":
                    listen_result_source = packet.response.body
                    fans_source = listen_result_source["data"]
                    fans_source = self.del_colu(fans_source)

                    fans_source_result_json = {"fans_source": fans_source}
                    result.update(fans_source_result_json)

                if packet.url == "https://creator.xiaohongshu.com/api/galaxy/creator/data/fans/overall_new":

                    listen_result_thirty1 = packet.response.body
                    print(listen_result_thirty1)
                    rise_fans_list = listen_result_thirty1["data"]["thirty"]["rise_fans_list"]
                    leave_fans_list = listen_result_thirty1["data"]["thirty"]["leave_fans_list"]
                    fans_list = listen_result_thirty1["data"]["thirty"]["fans_list"]
                    listen_result_thirty_json = {"rise_fans_list": rise_fans_list, "leave_fans_list": leave_fans_list,
                                                 "fans_list": fans_list}
                    print(listen_result_thirty_json)
                    result.update(listen_result_thirty_json)

                # Stop after the three expected packets.  NOTE(review): count
                # is incremented for every packet, matched or not — confirm
                # this cannot exit before all three arrive.
                if count == 3:
                    break
        #
        dataframe_thirty = pd.DataFrame([{"fans_thirty": result}])
        # dataframe_thirty["spider_user_id"] = self.user_id
        # dataframe_thirty["spider_time"] = current_time
        dataframe_thirty = dataframe_thirty.astype("str")
        print(dataframe_thirty)
        # pd.io.sql.to_sql(dataframe_thirty, 'xhs_fans_thirty', self.mysql_config(), schema='live_data',
        #                  if_exists='append', index=False)

        return dataframe_thirty


    """登录二维码状态监控"""
    def login_stauts_listen(self):
        """Watch the QR-code polling endpoint and mirror its status into redis.

        Status codes observed: 1 = logged in, 2 = waiting for scan,
        3 = scanned, 4 = authorisation failed / timed out.  Every status is
        written to redis key 'xhs_qrcode_status:<user_id>' with a 5-minute TTL.
        Returns "小红书用户登录成功" on success, "授权超时或扫码失败" on failure.
        """
        self.ch.listen.start('https://customer.xiaohongshu.com/api/cas/customer/web/qr-code?service=https:%2F%2Fcreator.xiaohongshu.com')

        status_key = 'xhs_qrcode_status:{}'.format(self.user_id)

        for packet in self.ch.listen.steps():
            print(packet.url)
            print(packet.response.body["data"])
            status_value = str(packet.response.body["data"]["status"])

            # Mirror any recognised status into redis before reacting to it.
            if status_value in ("1", "2", "3", "4"):
                self.redis_conn.set(status_key, status_value)
                self.redis_conn.expire(status_key, 300)

            if status_value == "2":
                print("等待扫码中")
            elif status_value == "3":
                print("扫码成功")
            elif status_value == "4":
                print("授权失败")
                return "授权超时或扫码失败"
            elif status_value == '1':
                print("登录成功")
                break
        return "小红书用户登录成功"

    """判断用户是否在数据库中 不在为新用户 在的话为老用户"""
    def user_info(self):
        """Return a one-row DataFrame with this spider user's id and chrome port."""
        # Engine creation kept for parity with the original; the insert into
        # xhs_user_info below remains disabled.
        engine2 = self.mysql_config()
        record = {"spider_user_id": str(self.user_id), "spider_port": str(self.PORT)}
        # pd.io.sql.to_sql(data_frame, 'xhs_user_info', engine2, schema='live_data',if_exists='append',index=False)
        return pd.DataFrame([record])

    """获取cookies"""

    def get_cookies(self):
        """Serialise the browser's current cookies into a 'name=value;' header string.

        Relies on the first two fields of each DrissionPage cookie record
        being the cookie's name and value, in that order.
        """
        pair_strings = []
        for cookie in self.ch.cookies(all_domains=False, all_info=True):
            first_two = list(OrderedDict(cookie).items())[:2]
            name_value = ''
            for position, field in enumerate(first_two):
                # First field's value gets the '=' separator appended.
                suffix = '=' if position == 0 else ''
                name_value += field[1] + suffix
            pair_strings.append(name_value)
        # Every pair keeps a trailing ';', matching the Cookie header format.
        return ''.join(pair + ';' for pair in pair_strings)

    def dy_headers(self, cookies):
        """Build request headers for creator-platform API calls.

        cookies: a pre-serialised Cookie header string.
        """
        return {
            "referer": "https://creator.xiaohongshu.com/new/note-manager",
            "Cookie": cookies,
        }

    """二次授权时候 删除之前的token 及chrome文件夹"""

    def delete_folder(self, chrome_id):
        """Delete the per-user chrome profile folder under ./user_info_dir.

        Used during re-authorisation to drop a stale login token.
        chrome_id: the spider user id (folder name).
        """
        # os.path.join is portable; the original hard-coded Windows "\\"
        # separators although the configured browser path targets macOS,
        # so rmtree could never find the folder there.
        folder = os.path.join(os.getcwd(), "user_info_dir", str(chrome_id))
        try:
            shutil.rmtree(folder)
            print(f"成功删除文件夹")
        except OSError as e:
            # Keep best-effort semantics: report, don't raise.
            print(f"删除文件夹失败")
    def notes_man(self):
        """Fetch the user's posted-notes list and derive a stats page count.

        Returns the string "no notes" when the account has no notes at all,
        otherwise a tuple (DataFrame with the notes under "data_desc",
        page_num) where page_num = ceil(notes_count / 12).
        NOTE(review): the crawl loop is hard-coded to page 0 only
        (range(0, 1)) even though notes_num_page is computed above — confirm
        whether multi-page crawling was disabled on purpose.
        """

        notes_list_all = []
        tags_list = []
        # Probe request: page 0 tells us whether any notes exist at all.
        url_test = "https://creator.xiaohongshu.com/api/galaxy/creator/note/user/posted?tab=1&page=0"
        cookies = self.get_cookies()
        aa_test = requests.get(url_test, headers=self.dy_headers(cookies))
        # NOTE(review): `re` shadows the stdlib re module inside this scope.
        re = json.loads(aa_test.text)
        if re["data"]["tags"] == []:
            return "no notes"
        else:
            tags = re["data"]["tags"]
            print(tags)
            # Page size 11 here; the page_num computation below uses 12 —
            # presumably different endpoints page differently; verify.
            notes_num_page = int(tags[0]["notes_count"])
            if notes_num_page <=11:
                notes_num_page = 1
            else:
                notes_num_page = int(notes_num_page / 11)+1

        for i in range(0, 1):
            # self.ch.ele('xpath://*[@id="content-area"]/main/div[1]/div/div[2]/div/div[2]/div/div').click()

            url = "https://creator.xiaohongshu.com/api/galaxy/creator/note/user/posted?tab=1&page={}".format(i)
            cookies = self.get_cookies()
            time.sleep(1)
            aa = requests.get(url, headers=self.dy_headers(cookies))
            data = json.loads(aa.text)
            print(data,"zhidaozhidaozhidao")
            # page = data["data"]['page']
            # print(page)
            tags = data["data"]["tags"]
            tags_list.append(tags)
            # Keep only the fields we store per note.
            for j in data["data"]["notes"]:
                js_json = {"id": j["id"], "time": j["time"], "images_list": j["images_list"]}
                print(js_json,"zhidaozhidaozhidao")
                notes_list_all.append(js_json)
                # if page == -1:
                #     print("页码到此为止")
                #     break
        print(tags_list[0], "()()()()()()()()()()()()()()()()()")
        # if tags_list[0] == []:
        #     return "no notes"
        #
        # else:

        # Manual ceiling division: 12 notes per stats page.
        page_num = int(tags_list[0][0]["notes_count"])
        if page_num <= 12:
            page_num = 1
        else:
            page_num = page_num / 12
            if '.' in str(page_num):
                page_num = int(str(page_num).split(".")[0]) + 1
            else:
                page_num = page_num
        print("页码数为:",page_num)
        # Tag summary rows are appended after the per-note rows.
        notes_list_all.extend(tags_list[0])
        print(notes_list_all)
        notes_result = {"data_desc": notes_list_all}
        data_frame = pd.DataFrame([notes_result])
        data_frame = data_frame.astype("str")
        print(data_frame)
        return data_frame,page_num
    def notes_de(self,page_num):
        """Fetch per-note 7-day stats pages from the note_stats endpoint.

        page_num: total page count from notes_man().  NOTE(review): the loop
        below is hard-coded to a single page (range(1, 2)) and ignores
        page_num — the commented-out throttling code suggests this was
        deliberate, but confirm.
        Returns a one-row DataFrame with the pages under "data_desc_notes".
        """
        print(page_num)
        desc_list = []
        # int(page_num) + 1
        # if int(page_num)>=30:
        #     page_num = 30
        # else:
        #     page_num = int(page_num)
        for i in range(1,int(1)+1):
            url = "https://creator.xiaohongshu.com/api/galaxy/creator/data/note_stats/new?page={}&page_size=12&sort_by=time&note_type=0&time=7&is_recent=false".format(i)
            cookies = self.get_cookies()
            notes_desc = requests.get(url, headers=self.dy_headers(cookies))
            notes_desc = json.loads(notes_desc.text)
            desc_list.append(notes_desc['data'])
        notes_result = {"data_desc_notes":desc_list}
        data_frame = pd.DataFrame([notes_result])
        data_frame = data_frame.astype("str")
        return data_frame

    """二次授权时候 删除之前的token 及chrome文件夹"""

    def update_two_login(self, sql):
        """Execute one UPDATE/DML statement against live_data and commit.

        sql: a complete SQL string.  NOTE(review): callers build these via
        str.format() — consider parameterised queries to avoid SQL injection;
        credentials are also hard-coded here and in sibling helpers.
        Returns "修改成功" on success.
        """
        con = pymysql.connect(host='rm-bp17r99475xuauk63yo.mysql.rds.aliyuncs.com', port=3306, user='live_data',
                              passwd='CJMg2tsHg7sj6bo0UTKq0a4T!fPDpJ', db='live_data', charset='utf8')
        try:
            # 获取操作数据的对象 cursor
            cursor = con.cursor()
            try:
                cursor.execute(sql)
                # 提交事务
                con.commit()
            finally:
                # 关闭Cursor
                cursor.close()
        finally:
            # Close the connection even if execute/commit raises — the
            # original leaked the connection on error.
            con.close()

        return "修改成功"

    """账号过期，二次授权逻辑"""
    def secondary_authorization(self,fans_num):
        """Rebind an XHS account that was previously tied to another spider user.

        If the scraped account (fans_num[2]) already exists in xhs_user under a
        DIFFERENT spider_user_id, purge that old user's config rows, user rows,
        total-data rows and chrome profile folder, then point xhs_user_info at
        the current spider user.
        NOTE(review): delete_folder(sql_1[0][0]) is called twice below —
        looks like an accidental duplicate; confirm before removing.
        """
        sql = "select xhs_account,spider_user_id from live_data.xhs_user where type='0'"
        login_sql = self.pymysql_fetchall(sql)
        xhs_account_list = []
        for x_acc in login_sql:
            xhs_account_list.append(x_acc[0])
        print(fans_num)
        print(xhs_account_list)
        if fans_num[2] in xhs_account_list:
            print("&&&&&&&&&&&…账号存在的&&&&&&&&&&&&")
            sql = "select spider_user_id from live_data.xhs_user where xhs_account='{}' and type='0'".format(
                fans_num[2])
            sql_1 = self.pymysql_fetchall(sql)

            # Only purge when the account has moved to a NEW spider user id.
            if self.user_id != sql_1[0][0]:
                sql = "delete from live_data.xhs_spider_config where spider_user_id='{}'".format(sql_1[0][0])
                self.delete_two_login(sql)

                print("走大这里了")
                dele_sql = "delete from live_data.xhs_user where spider_user_id='{}' and type='0'".format(sql_1[0][0])
                self.delete_two_login(dele_sql)
                print(os.getcwd() + "/code_inter/user_info_dir/" + sql_1[0][0])
                self.delete_folder(sql_1[0][0])
                delete_sql_total_data = "delete from live_data.xhs_total_data where spider_user_id='{}' and type='0'".format(
                    sql_1[0][0])
                self.delete_folder(sql_1[0][0])
                self.delete_two_login(delete_sql_total_data)

                """更新爬虫所需user_info表"""

                update_user_info_data = "update live_data.xhs_user_info set spider_user_id='{}',win='C' where xhs_account='{}'".format(
                    self.user_id, fans_num[2])
                self.update_two_login(update_user_info_data)

    def pymysql_update_notes_id_detail(self,sql, id_list):
        """Execute a parameterised UPDATE with *id_list* bound to %s and commit.

        sql: UPDATE statement containing one %s placeholder.
        id_list: value bound server-side (callers pass str(id_list)).
        Returns "修改成功" on success.
        """
        con = pymysql.connect(host='rm-bp17r99475xuauk63yo.mysql.rds.aliyuncs.com', port=3306, user='live_data',
                              passwd='CJMg2tsHg7sj6bo0UTKq0a4T!fPDpJ', db='live_data', charset='utf8')
        try:
            # 获取操作数据的对象 cursor
            cursor = con.cursor()
            try:
                cursor.execute(sql, id_list)
                # 提交事务
                con.commit()
            finally:
                # 关闭Cursor
                cursor.close()
        finally:
            # Close even on failure — the original leaked the connection
            # when execute()/commit() raised.
            con.close()

        return "修改成功"

    """计算有多少个notes，并更新到xhs_notes_account表里面"""



    def notes_account_huizong(self,xhs_account):
        """Collect all note ids for *xhs_account* and upsert them into
        live_data.xhs_notes_account.

        Parses the data_desc blob stored on xhs_user, drops the two special
        pseudo-note rows, then inserts (new account) or updates (existing
        account) the id list.  Returns "计算完成".
        """
        import ast
        data = pd.read_sql(
            "select data_desc,xhs_account from live_data.xhs_user where xhs_account='{}'".format(xhs_account),
            self.mysql_config())

        data_list = [data.loc[i].to_dict() for i in data.index.values]
        for i in data_list:
            notes_account_data = pd.read_sql(
                "select xhs_account from live_data.xhs_notes_account where xhs_account='{}'".format(i["xhs_account"]),
                self.mysql_config())
            # data_desc was stored via str(); literal_eval restores the
            # list-of-dicts structure.
            notes_detail_list = ast.literal_eval(i["data_desc"])
            notes_detail_list = [d for d in notes_detail_list if d["id"] != "special.note_time_desc"]
            notes_detail_list = [d for d in notes_detail_list if d["id"] != "special.video"]

            id_list = []
            for j in notes_detail_list:
                id_list.append(j["id"])
            # Insert when the account has no row yet, otherwise update.
            if notes_account_data.empty:

                data_frame = pd.DataFrame([{"xhs_account": i["xhs_account"], "notes_id": id_list}])
                print(data_frame)
                data_frame = data_frame.astype("str")
                pd.io.sql.to_sql(data_frame, 'xhs_notes_account', self.mysql_config(), schema='live_data',
                                 if_exists='append',
                                 index=False)
            else:
                sql = "update live_data.xhs_notes_account set notes_id = %s where xhs_account='{}'".format(
                    i["xhs_account"])
                print(self.pymysql_update_notes_id_detail(sql, str(id_list)))

        return "计算完成"

    def get_xhs_detail(self,note_ids):
        """Fetch one note's detail payload (raw response text) from the
        internal XHS detail service.

        note_ids: a single note id string.
        """
        token = "xhs_f2807eac95279ad0a4949bf3ede93637_detail"
        url = "http://47.98.165.184:3007/xhs/detail?token={}&note_id={}".format(token, note_ids)
        response = requests.get(url)
        return response.text



    def push_data_xhs_detail(self,data):
        """POST crawled note-detail data to the push endpoint.

        data: pre-serialised detail payload.
        Returns the endpoint's response text.
        """
        # Wrap the payload with the service envelope (avoid shadowing the
        # `data` parameter like the original did).
        envelope = {
            "bizType": 1,
            "env": 2,
            "secretKey": "Nqfep27LYY3ESEldvALHGnv6Ds56Cs4k",
            "data": data,
        }
        # url = "http://apia.intranet.linkinbd.com/tencent/python/pushXhsScrawlDetailData"
        url = "http://127.0.0.1:8085/tencent/python/pushXhsScrawlDetailData"
        # url = self.xhs_config["push.data.address"][""]
        code_status = requests.post(url, json=envelope)
        print(code_status.text)
        print("详情数据已推送完成")
        return code_status.text
    def desc_com(self,xhs_account):
        """Fetch detail data for notes not yet stored, then push every stored
        note detail for the account downstream.

        Diffs note ids in xhs_notes_account against xhs_notes_desc, calls
        get_xhs_detail for each missing id and stores the row, then POSTs all
        of the account's stored details via push_data_xhs_detail.
        Returns a completion message string.
        """

        import ast
        data = pd.read_sql("select * from live_data.xhs_notes_account where xhs_account='{}'".format(xhs_account),self.mysql_config())
        notes_desc_data = pd.read_sql("select xhs_account,notes_id from live_data.xhs_notes_desc where xhs_account='{}'".format(xhs_account),self.mysql_config())
        notes_desc_data_list = notes_desc_data["notes_id"].values.tolist()
        data_list = [data.loc[i].to_dict() for i in data.index.values]
        for i in data_list:
            # notes_id was stored via str(); literal_eval restores the list.
            notes_detail_list = ast.literal_eval(i["notes_id"])
            difference = list(set(notes_detail_list) - set(notes_desc_data_list))
            print("此次新增笔记 ",len(difference),"篇")
            if len(difference) == 0:
                print("*****************不用计算详情数据*****************")
            else:
                for j in difference:
                    result = self.get_xhs_detail(j)
                    # print(result)
                    data_frame = pd.DataFrame([{"notes_id": j, 'notes_desc': result, "xhs_account": i["xhs_account"]}])
                    data_frame = data_frame.astype("str")
                    print(data_frame)

                    pd.io.sql.to_sql(data_frame, 'xhs_notes_desc', self.mysql_config(), schema='live_data',
                                     if_exists='append',
                                     index=False)

        print("push_data")
        print("在这里推送数据，推送整个账号下边的所有笔记")
        push_notes_detail_data = pd.read_sql("select * from live_data.xhs_notes_desc where xhs_account='{}'".format(xhs_account),self.mysql_config())
        print("下边数据为",xhs_account,"账号下所有笔记的详情数据")
        print(push_notes_detail_data)
        data_list_desc = [push_notes_detail_data.loc[i].to_dict() for i in push_notes_detail_data.index.values]


        # xhs_unique_id = pd.read_sql("select userId from live_data.xhs_user_info where spider_user_id='{}'".format(self.user_id))
        # xhs_unique_userId = xhs_unique_id["userId"].values.tolist()[0]
        push_detail_data = json.dumps({"xhs_account":xhs_account,'data':data_list_desc},ensure_ascii=False)

        self.push_data_xhs_detail(push_detail_data)
        return "所有笔记详情计算完成，并入库成功!"

    """爬虫主程序"""
    def xhs_huizong(self):
        """Main crawl flow for one user: ensure login (QR code if required),
        then scrape the fans/notes modules and store everything into MySQL.

        Returns {"code": 200, ...} on success, or {"code": "500", ...} when
        QR authorisation times out / fails.  NOTE(review): the failure code is
        a string while the success code is an int — callers compare both forms.
        """
        start_time = time.time()
        # logging.debug(f"Input user={self.user_id}, port={self.PORT}")
        # logging.info(f"CURRENT COMPUTING MACHINE={'B'}")
        print('用户', self.user_id, '此次的端口是', self.PORT)
        # self.ch.get('https://creator.xiaohongshu.com/creator/notes')
        self.ch.get('https://creator.xiaohongshu.com/new/home')
        time.sleep(2)
        user_it_exist = self.user_state()
        data_frame = self.user_info()
        # First-time users get a config row flagged '0' (logged out).
        if user_it_exist == "用户不存在":
            # logging.info("当前x用户为新用户:",self.user_id)

            data_frame["spider_login_code"] = '0'
            pd.io.sql.to_sql(data_frame, 'xhs_spider_config', self.mysql_config(), schema='live_data',
                             if_exists='append', index=False)


        print("走到这里了")
        login_url_is = self.ch.url
        # A 401 redirect to either login URL means the stored session expired.
        if login_url_is == "https://creator.xiaohongshu.com/login?source=&redirectReason=401&lastUrl=%252Fnew%252Fhome" or login_url_is == "https://creator.xiaohongshu.com/login?source=&redirectReason=401&lastUrl=%252Fcreator%252Fnotes":
            print("登录过期或未登录")
            self.ch.ele('xpath://*[@id="page"]/div/div[2]/div[1]/div[2]/div/div/div/div/img').click()
            time.sleep(1)
            """二维码保存"""
            """需要登录时,将数据库用户状态设为0"""
            sql = 'update live_data.xhs_spider_config set spider_login_code = %s where spider_user_id = %s'
            self.pymysql_update(sql, num='0')
            print("***数据库用户状态修改成功***")
            print(self.qrcode_save())
            end_time = time.time()
            print("***********二维码获取时间***********",end_time - start_time)


            # Blocks until the QR code is scanned/confirmed or fails.
            login_status = self.login_stauts_listen()
            if login_status == "小红书用户登录成功":
                print(self.user_id,"登录成功了")
                """授权成功，将数据库用户的状态改为1"""
                sql = 'update live_data.xhs_spider_config set spider_login_code = %s where spider_user_id = %s'
                print(self.pymysql_update(sql, '1'))
            if login_status == "授权超时或扫码失败":
                self.ch.close()
                logging.info(f"Current User No Login: {self.user_id}")
                logging.info(f"Current User Empower TIMEOUT:{self.user_id},qrcode={'4'}")
                return {"code":"500","msg":"授权超时或扫码取消或失败"}

                    # print("用户授权超时，退出程序")
                    # """授权超时，关闭浏览器"""
                    # self.ch.close()
                    # return {"code":'500',"msg":"用户授权超时，请重新访问扫码"}
        else:
            logging.info(f"Current User No Login: {self.user_id}")

            # Session still valid — flag redis so the front-end skips the QR step.
            self.redis_conn.set('xhs_qrcode:{}'.format(self.user_id), '"no_login"')
            self.redis_conn.expire('xhs_qrcode:{}'.format(self.user_id), 300)

            print("不用登录******")
        """默认登录的情况下 返回首页抓取数据"""
        time.sleep(2)
        # self.ch.get('https://creator.xiaohongshu.com/creator/home')
        # self.ch.ele('xpath://*[@id="content-area"]/main/div[1]/div/div[2]/div/div[1]/div/div').click()
        time.sleep(1)
        # fans_num appears to unpack as: [0] follower count, [1] profile
        # DataFrame, [2] xhs account name, [3] platform userID —
        # TODO confirm against xhs_user_info_listen().
        fans_num = self.xhs_user_info_listen()
        # fans_num = self.xhs_user_info_requests()
        xhs_account = fans_num[2]
        logging.info(f"CURRENT USER XHS_ACCOUNT={xhs_account}")
        """二次授权逻辑"""

        # self.secondary_authorization(fans_num)

        print("""***开始切换近30天内汇总笔记***""")
        self.notes_switch_thirdy()

        print("""***开始获取近30天内汇总笔记***""")

        """汇总笔记模块，获取笔记近30天的汇总数据"""
        notes_thirty = self.notes_data_thirdy()

        # time.sleep(2)
        # """图文数据和视频数据汇总 并入库"""
        #
        # print(self.video_tuwen_total())
        #
        # self.fans_listen_data()
        fans_thirty = self.fans_listen_thirty(fans_num[0])
        time.sleep(2)
        # notes_man returns either the string "no notes" or a
        # (DataFrame, page_num) tuple.
        hz = self.notes_man()
        if hz == "no notes":
            logging.info(f"CURRENT USER NO NOTES={self.user_id}")

            # Empty placeholders keep the xhs_user column layout consistent.
            notes_result_1 = {"data_desc_notes": ''}
            data_frame_1 = pd.DataFrame([notes_result_1])

            notes_result_0 = {"data_desc": ''}
            data_frame_0 = pd.DataFrame([notes_result_0])
            data_huizong = pd.concat([fans_num[1], data_frame_0, notes_thirty, fans_thirty,data_frame_1], axis=1)

            print(data_huizong)
            data_huizong["type"] = '0'
            data_huizong["userID"] = fans_num[3]
            pd.io.sql.to_sql(data_huizong, 'xhs_user', self.mysql_config(), schema='live_data', if_exists='append',
                             index=False)
            self.ch.close()
            # "200" in redis signals the whole crawl completed.
            self.redis_conn.set('xhs_qrcode_status:{}'.format(self.user_id), "200")
            self.redis_conn.expire('xhs_qrcode_status:{}'.format(self.user_id), 300)
            return {"code": 200, "msg": "程序执行完成，数据已全部入库"}
        else:

            notes_desc_info = self.notes_de(hz[1])
            data_huizong = pd.concat([fans_num[1], hz[0],notes_thirty,fans_thirty,notes_desc_info], axis=1)
            print(data_huizong)
            data_huizong["type"] = '0'
            data_huizong["userID"] = fans_num[3]

            pd.io.sql.to_sql(data_huizong, 'xhs_user', self.mysql_config(), schema='live_data', if_exists='append',index=False)

            self.ch.close()
            self.redis_conn.set('xhs_qrcode_status:{}'.format(self.user_id),"200")
            self.redis_conn.expire('xhs_qrcode_status:{}'.format(self.user_id), 300)
            # notes_account_huizong = self.notes_account_huizong(fans_num[2])
            # if notes_account_huizong == "计算完成":
            #     self.desc_com(fans_num[2])
            # else:
            #     pass
            return {"code":200,"msg":"程序执行完成，数据已全部入库"}

    def delete_prod_user(self,sql):
        """Run a parameterised DELETE bound to this spider user id and commit.

        sql: a DELETE statement with one %s placeholder for spider_user_id.
        Returns "删除成功" on success.
        """
        con = pymysql.connect(host='rm-bp17r99475xuauk63yo.mysql.rds.aliyuncs.com', port=3306, user='live_data', passwd='CJMg2tsHg7sj6bo0UTKq0a4T!fPDpJ', db='live_data', charset='utf8')
        try:
            # 获取操作数据的对象 cursor
            cursor = con.cursor()
            try:
                # Bind the user id server-side (parameterised, not formatted).
                cursor.execute(sql, self.user_id)
                # 提交事务
                con.commit()
            finally:
                # 关闭Cursor
                cursor.close()
        finally:
            # Close even on failure — the original leaked the connection
            # when execute()/commit() raised.
            con.close()
        # time.sleep(2)
        return "删除成功"

    """二次授权所需删除据逻辑 mysql"""
    def delete_two_login(self,sql):
        """Execute one DELETE statement for the re-authorisation flow and commit.

        sql: a complete SQL string.  NOTE(review): callers build these via
        str.format() — consider parameterised queries to avoid SQL injection.
        Returns "删除成功" on success.
        """
        con = pymysql.connect(host='rm-bp17r99475xuauk63yo.mysql.rds.aliyuncs.com', port=3306, user='live_data', passwd='CJMg2tsHg7sj6bo0UTKq0a4T!fPDpJ', db='live_data', charset='utf8')
        try:
            # 获取操作数据的对象 cursor
            cursor = con.cursor()
            try:
                cursor.execute(sql)
                # 提交事务
                con.commit()
            finally:
                # 关闭Cursor
                cursor.close()
        finally:
            # Close even on failure — the original leaked the connection
            # when execute()/commit() raised.
            con.close()
        # time.sleep(2)
        return "删除成功"


    def push_data_xhs(self):
        """Notify the downstream service that this user's crawl data is ready.

        Returns the endpoint's response text.
        """
        endpoint = "http://localhost:8085/tencent/login/clients/getQrcodeStatus"
        # url = "http://127.0.0.1:8085"
        # url = "http://localhost:8085/tencent/python/pushXhsRpaData"
        payload = {
            "bizType": 1,
            "env": 2,
            "random": "{}".format(self.user_id),
            "secretKey": "Nqfep27LYY3ESEldvALHGnv6Ds56Cs4k",
        }
        code_status = requests.post(endpoint, json=payload)
        print("小红书数据推送成功",code_status.text)
        logging.info(f"XHS Total Data Push Status:{code_status}")
        return code_status.text
    def push_data(self):
        """Upsert this user's latest crawl row into xhs_total_data, then ping
        the downstream push endpoint.

        Inserts a new row when none exists for (spider_user_id, type='0'),
        otherwise updates the stored data blob and timestamp in place.
        Returns "推送完成".
        """
        data = pd.read_sql("select * from live_data.xhs_user where spider_user_id='{}'".format(self.user_id),self.mysql_config())

        data_list = [data.loc[i].to_dict() for i in data.index.values]

        for i in data_list:
            json_data = {"userID":i["userID"],"spider_user_id": i["spider_user_id"], "xhs_account": i["xhs_account"], "data": i}
            sql_cha = "select * from live_data.xhs_total_data where spider_user_id='{}' and type='{}'".format(i["spider_user_id"],'0')
            sql_cha_data = self.pymysql_fetchall(sql_cha)
            # Empty tuple => no existing row: insert; otherwise update.
            if sql_cha_data == ():

            # sql = 'update live_data.xhs_total_data set spider_user_id = %s and xhs_account = %s and data=%s and type=%s where spider_user_id=%s'
            # current_time = datetime.datetime.now()
            # current_time = str(current_time)[:-7]
            # self.pymysql_update_total(sql, i["spider_user_id"],i["xhs_account"],str(i),'0',i["spider_user_id"])
                data_total = pd.DataFrame([json_data])
                data_total = data_total.astype("str")
                # print(data_total)
                data_total["type"] = '0'
                pd.io.sql.to_sql(data_total, 'xhs_total_data', self.mysql_config(), schema='live_data',
                                 if_exists='append', index=False)  # aaaa = [{"data": data_list}]
            else:
                print("更改数据")

                spider_user_id = i["spider_user_id"]
                current_time = datetime.datetime.now()
                # The whole row dict is stored stringified in the data column.
                iii = str(i)
                if i["spider_user_id"] == self.user_id:
                    logging.info(f"Current User xhs_total_data Data:{i}")
                sql = 'update live_data.xhs_total_data set data = %s , time=%s where spider_user_id=%s and type="0"'
                self.pymysql_update_total(sql,iii,current_time,spider_user_id)
        self.push_data_xhs()
        return "推送完成"


    def xhs_main_app(self):
        """Top-level entry point: run the full crawl, push the results, then
        kick off the asynchronous per-day detail calculation.

        Returns the status dict produced by xhs_huizong().
        """
        print(self.user_id)
        console_info = self.xhs_huizong()

        # A string "500" code marks a failed/aborted authorisation: skip push.
        if console_info["code"] == "500":
            print("不推送给数据")
        else:
            self.push_data()

        success_payload = {'code': 200, 'msg': '程序执行完成，数据已全部入库'}
        if console_info == success_payload:
            print("走到这里了")
            # Per-day note-detail computation (from xhs_detail_up).
            notes_detail_result = notes_detail_by_day(self.user_id)
            print(notes_detail_result)
            logging.info("异步详情接口计算完成！")
        logging.info("CURRENT CODE FINISH!")
        return console_info




# xhs = XHS_XPIDER(user_id='fanfanfan')
# aa = xhs.xhs_main_app()
# print(aa)

