# -*- coding: utf-8 -*-
"""
Created on Thu Aug 25 10:28:22 2022

@author: xiong
"""
import ast
import hashlib
import json
import logging
import random
import re
import time
logger = logging.getLogger("facebook_spider")


from YmlReader import YmlReader
from RedisConnector import RedisConnector
from SqlExecuter import SqlExecuter
from ProxyValidTester import ProxyValidTester
from FacebookScraper import FacebookScraper

def split_task(task):
    """Unpack a queued task tuple into the fields the spider needs.

    task: a 2-sequence whose second element is the task-info dict.
    Returns None for non-Facebook tasks, otherwise the 11-tuple
    (task_id, task_name, page_limit, page_size, url, missionlist,
     reply_page, reply_page_size, related_page, related_page_size, exe_type).
    """
    task_info = task[1]
    if task_info["webType"] != "Facebook":
        return None
    task_id = task_info["id"]
    task_name = task_info["taskName"]
    page_limit = task_info["maxLimit"]
    page_size = task_info["pageSize"]
    # Only taskType "1" carries a start url in ruleContent.  The original
    # left `url` unbound for any other taskType, which made the return
    # statement raise NameError; default to None instead.
    url = task_info["ruleContent"] if task_info["taskType"] == "1" else None
    # First mission is the content type itself, followed by the extra demands.
    missionlist = [task_info["contentType"]] + task_info["demand"].split(",")
    reply_page = task_info['replyMaxLimit']
    reply_page_size = task_info['replyPageSize']
    related_page = task_info['relatedMaxLimit']
    related_page_size = task_info['relatedPageSize']
    exe_type = task_info['executeType']
    return (task_id, task_name, page_limit, page_size, url, missionlist,
            reply_page, reply_page_size, related_page, related_page_size, exe_type)


def random_walker(span):
    """Return a randomized delay around *span*, never less than span/2.

    Samples a normal distribution centred on *span* (sigma = span/3) so
    consecutive sleeps look human-like rather than fixed-interval.
    """
    sample = random.normalvariate(mu=span, sigma=span / 3)
    lower_bound = span / 2
    return sample if sample > lower_bound else lower_bound


class TaskReleaser():
    """Consumes one Facebook gather task from the message queue and runs the
    full scrape pipeline: connectivity checks, account checkout, post-list
    paging, post/comment capture and the optional like/share collection.
    """

    def _mark_task_status(self, status, task_id):
        """Persist the task's vc_status (0 = done, 2 = failed, 3 = periodic done).

        NOTE(review): SQL is built by string interpolation, which is injectable
        if task_id can contain quotes; SqlExecuter does not obviously expose
        parameter binding, so this is only flagged, not changed.
        """
        sql = """UPDATE cfg_gather_task SET vc_status = %s WHERE vc_id = '%s'""" % (str(status), task_id)
        self.SqlExecuter.execute_sql(sql)

    def _update_counters(self, task_id):
        """Write the running today/sum record counters back to the task row."""
        sql = """UPDATE cfg_gather_task SET nm_today_total = %s WHERE vc_id = '%s'""" % (str(self.counter), task_id)
        self.SqlExecuter.execute_sql(sql)
        sql = """UPDATE cfg_gather_task SET nm_sum_total = %s WHERE vc_id = '%s'""" % (str(self.counter + self.total_counter), task_id)
        self.SqlExecuter.execute_sql(sql)

    def _finish_task(self, ch, method, body, task_id, status):
        """Set the final status, ack the MQ message and drop the redis record."""
        self._mark_task_status(status, task_id)
        ch.basic_ack(delivery_tag=method.delivery_tag)
        self.RedisConnector.hdelete("spider:mq:task", body.decode())
        if status != 2:
            logger.info(str(task_id) + " ended")

    def receive_task(self, ch, method, properties, body):
        """RabbitMQ consumer callback.

        ch / method: pika channel and delivery info (used for basic_ack).
        properties: unused.
        body: the task id as bytes.
        """
        logger.info("receive task id = " + body.decode())
        reader = YmlReader()
        conf = reader.settings
        proxies = {"http": conf["proxies"]["http"],
                   "https": conf["proxies"]["https"]}

        redis_info = conf["redis"]
        # --- check that redis / mysql / (optionally) proxies are reachable ---
        self.RedisConnector = RedisConnector(redis_info["host"], redis_info["port"], redis_info["database"])
        self.SqlExecuter = SqlExecuter(body, reader)

        errorsign = False
        if self.RedisConnector.error:
            logger.info("redis connected fail,error:" + str(self.RedisConnector.error))
            errorsign = True
        if self.SqlExecuter.error:
            logger.info("database connected fail,error:" + str(self.SqlExecuter.error))
            errorsign = True
        if conf["using_proxies"]:
            self.ProxyValidTester = ProxyValidTester(proxies)
            if self.ProxyValidTester.error:
                logger.info("proxies connected fail,error:" + str(self.ProxyValidTester.error))
                errorsign = True

        if errorsign:
            # leave the message un-acked so the broker can redeliver it
            return
        logger.info("redis、mysql、proxies  all are ready")

        self.counter = 0
        sql = """SELECT * FROM cfg_gather_task where vc_id='%s'""" % body.decode()
        task_detail = self.SqlExecuter.execute_sql(sql, return_value=True)
        # column 14 is presumably the stored nm_sum_total — TODO confirm schema
        self.total_counter = task_detail[0][14]

        # --- fetch and parse the task detail the scheduler pushed into redis ---
        task_source_string = self.RedisConnector.hget("spider:mq:task", body.decode())
        if task_source_string:
            logger.info("Founding detail of task in redis")
            task_source_string = task_source_string.decode()
        else:
            logger.info("Detail of task Not Founded in redis")
            return
        # The payload is JSON (the original eval()-ed it after textual
        # null/true/false replacement, which both corrupts string values
        # containing those words and executes arbitrary redis content).
        task = json.loads(task_source_string)
        task_info = split_task(task)
        if not task_info:
            return
        task_id, task_name, page_limit, page_size, url, missionlist, reply_page, reply_page_size, related_page, related_page_size, exe_type = task_info
        task_id_md5 = hashlib.md5(str(task_id).encode(encoding='UTF-8')).hexdigest()

        # --- check out an idle, verified account ---
        logger.info("start getting valid account")
        sql = """SELECT * FROM cfg_gather_user where vc_web_type='Facebook' and vc_status='PFB验证成功' """
        accounts = self.SqlExecuter.execute_sql(sql, return_value=True)
        if not accounts:
            self._mark_task_status(2, task_id)
            logger.info("no valid account")
            return

        account = ""
        cookies = None
        for row in accounts:
            statu = self.RedisConnector.get('spider:id:' + row[0])
            # a missing key or b"0" both mean "idle"
            if not statu or statu == b"0":
                self.RedisConnector.set("spider:id:" + row[0], "1")  # mark busy
                account = row[0]
                cookies = row[4]
                break

        if not account:
            self._mark_task_status(2, task_id)
            logger.info("no valid account")
            return

        # --- initialise the scraper ---
        logger.info("start getting posts list")
        scripy = FacebookScraper(taskid=task_id)
        scripy.set_proxies(proxies)
        scripy.set_cookies(cookies)
        try:
            # resume state: post ids already captured by a previous run; the
            # value is our own str(list), so literal_eval (not eval) suffices
            raw = self.RedisConnector.get(task_id_md5 + ":" + "hasgottenposts")
            has_gotten_posts = ast.literal_eval(raw.decode()) if raw else []
            if not has_gotten_posts:
                has_gotten_posts = []
        except (ValueError, SyntaxError):
            has_gotten_posts = []

        # --- page through the post list ---
        logger.info("start getting posturls list")
        post_data = {}
        prev_count = 0
        for page in range(int(page_limit)):
            # The original iterated range(1, page_limit) with an `if i == 0`
            # branch that could never fire, leaving posts_url unbound.
            if page == 0:
                posts_url = url
                isFirstpage = 1
            else:
                isFirstpage = 0
            post_data, posts_url, soup = scripy.get_posts_url(posts_url, isFirstpage, post_data)
            if posts_url == "https://m.facebook.com":
                # bounced back to the home page: the target account is gone
                self._mark_task_status(2, task_id)
                logger.info("the target account is invalid")
                return
            total = len(scripy.posts_url)
            if total == 0:
                self._mark_task_status(2, task_id)
                sql = """UPDATE cfg_gather_user SET vc_status = "被封禁" WHERE vc_id = %s""" % (account)
                self.SqlExecuter.execute_sql(sql)
                logger.info("grabbed fail,the account has been banned")
                return
            # count only the posts added by this page (the original compared
            # the count to itself, so it always logged 0 and always broke)
            logger.info("grabbed posts num:" + str(total - prev_count))
            # refresh the busy flag for the checked-out account (the original
            # indexed the integer loop counter here, which crashed)
            self.RedisConnector.set("spider:id:" + account, "1")
            if total == prev_count:
                # no new posts on this page: reached the end of the feed
                break
            prev_count = total
            time_span = random_walker(conf["time_span"])
            logger.info(f"sleep{time_span}s")
            time.sleep(time_span)

        # --- capture each post (text, image urls, comments) ---
        logger.info("start getting posts")
        posts_data = {}
        # NOTE(review): the original reset has_gotten_posts = [] here, which
        # clobbered the resume list loaded from redis above and defeated
        # deduplication across restarts; the reset has been removed.
        while scripy.posts_url:
            post_url = scripy.posts_url.pop()
            post_id = re.findall(r"story_fbid=(.+?)&", post_url)
            if post_id:
                post_id = post_id[0]
                if post_id not in has_gotten_posts:
                    logger.info("start catch data on the post which id = " + post_id)
                    has_gotten_posts.append(post_id)
                    # persist progress so a crash can resume where it stopped
                    self.RedisConnector.set_key_value(task_id_md5 + ":" + "hasgottenposts", str(has_gotten_posts))
                    self.RedisConnector.set_key_value(task_id_md5 + ":" + "hasnotgottenposts", str(scripy.posts_url))
                    posts_data[post_url] = scripy.get_public_postsAndImageurlAndComments_from_child_url(post_url)
                    if "comments_num" in posts_data[post_url]:
                        self.counter += 1 + posts_data[post_url]["comments_num"]
                    else:
                        self.counter += 1
                    self._update_counters(task_id)
                    logger.info("get %.0f imgurls,%.0f videourls,%.0f comments" % (
                        len(posts_data[post_url]["image_url_and_description"]),
                        len(posts_data[post_url]["vidio_url"]),
                        len(posts_data[post_url]["comments"])))
                else:
                    time.sleep(10)
                    logger.info("repeat postid ,overriding")
                    break

            time_span = random_walker(conf["time_span"])
            logger.info(f"sleep{time_span}s")
            time.sleep(time_span)

        # --- optional missions: like / share collection per post ---
        try:
            logger.info("start getting likes")
            if "like" in missionlist:
                # snapshot the keys: the loop mutates posts_data values only,
                # but a list copy is also required because dict_keys cannot be
                # sliced in Python 3 (the original did posts_data.keys()[:])
                for post_url in list(posts_data.keys()):
                    time.sleep(random_walker(conf["time_span"]))
                    total_like_data = []
                    # the like url lives in the captured post record, not in
                    # the key string that the original indexed
                    post_like_url = posts_data[post_url]["post_like_url"]
                    like_data_list, next_url = scripy.get_like_people(post_like_url, post_url)
                    if like_data_list:
                        self.counter += len(like_data_list)
                        self._update_counters(task_id)
                        total_like_data += like_data_list
                    # paginate further like pages; a distinct loop variable so
                    # the outer post_url is not clobbered (original reused i)
                    for page in range(1, related_page):
                        time.sleep(random_walker(conf["time_span"]))
                        like_data_list, next_url = scripy.get_more_like_people(next_url, page)
                        if like_data_list:
                            self.counter += len(like_data_list)
                            self._update_counters(task_id)
                            total_like_data += like_data_list
                        if not next_url:
                            break
                    posts_data[post_url]["post_like_data"] = total_like_data

            logger.info("start getting shares")
            if "share" in missionlist:
                for post_url in list(posts_data.keys()):
                    time.sleep(random_walker(conf["time_span"]))
                    total_share_data = []
                    post_share_url = posts_data[post_url]["post_share_url"]
                    share_data_list, next_url = scripy.get_share_people(post_share_url, post_url)
                    if share_data_list:
                        self.counter += len(share_data_list)
                        self._update_counters(task_id)
                        # the original appended like_data_list (stale data from
                        # the like phase) here instead of the share results
                        total_share_data += share_data_list
                    for page in range(1, related_page):
                        share_data_list, next_url = scripy.get_more_share_people(next_url, page)
                        if share_data_list:
                            self.counter += len(share_data_list)
                            self._update_counters(task_id)
                            total_share_data += share_data_list
                        if not next_url:
                            break
                    posts_data[post_url]["post_share_data"] = total_share_data

            # executeType "0" = one-shot task -> status 0; "1" = periodic -> 3
            if str(exe_type) == "0":
                self._finish_task(ch, method, body, task_id, 0)
            elif str(exe_type) == "1":
                self._finish_task(ch, method, body, task_id, 3)
        except Exception as e:
            self._finish_task(ch, method, body, task_id, 2)
            # the original passed e as a positional logging arg with no %s
            # placeholder, which raised a logging formatting error
            logger.info("error occuring in the process of grabbing share data: %s", e)

        