import base64
import json
import os
import sys
from collections import namedtuple
from concurrent.futures.thread import ThreadPoolExecutor
from typing import Union
import random
import re
import threading
import time
import traceback
from datetime import datetime
from urllib.parse import quote

import requests
from redis import Redis
from Log import Log
from queue import Queue
import subprocess
from parse_data import solve_post_info, solve_user_info
from do_api_request import (
    get_post,
    get_user_timeline_post,
    get_search_post,
    get_response_from_browser,
    get_group_timeline_post, get_user_profile, get_response_from_go_server
)
from script.app import StopFlag
from script.facebook import Facebook
from core.new_device import Device
from loguru import logger
# from fb_spider_api import solve_post_info
from plugin.wechat_bot import WechatBot
from utils.check_network import setting_network
from utils.redis_client import get_redis_client

# Redis list name for crawl seeds (the live work queue in this process is
# the in-memory seed_queue below; this key name is kept for reference).
seed_queue_name = 'fb:seeds'
# Connected under the __main__ guard below; every module function assumes
# it is a live, decode_responses=True client by the time it runs.
redis_client = None
# WeChat webhook used for operator alerts.
wechat = WechatBot('https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=3cfb6760-4ae6-4aca-8209-f74f16c92638')
# In-process queue of seeds waiting to be (re)crawled after a failed fetch.
seed_queue = Queue()
taskings = []
# Lightweight device handle: id (adb serial) plus its worker group.
D = namedtuple("Device", ["id", "group"])

# account_id -> cookie payload, shared by the worker threads.
cookies_pool = {}


def is_redis_running():
    """Return True if a local Redis server answers ``redis-cli ping`` with PONG.

    Uses the redis-cli binary rather than a client library so the probe works
    even before any Redis connection object exists in this process.
    """
    try:
        result = subprocess.run(
            ['redis-cli', 'ping'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            timeout=5
        )
    except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
        # redis-cli missing, not executable, or hung -> treat Redis as down.
        # NOTE: the original also caught CalledProcessError, but without
        # check=True subprocess.run never raises it -- a non-zero exit just
        # produces output != 'PONG' and falls through to return False below.
        return False
    return result.stdout.decode().strip() == 'PONG'


def check_redis():
    """Supervision loop: poll Redis every 10s and (re)start redis-server when down.

    Runs forever; intended to be launched in a background thread.
    """
    while True:
        try:
            if is_redis_running():
                time.sleep(10)
                continue
            # The trailing '&' backgrounds the server, so run() returns as
            # soon as the launching shell exits.
            text = subprocess.run('redis-server &', stdout=subprocess.PIPE, shell=True,
                                  timeout=10).stdout.decode().strip()
            logger.info(f"Redis 启动成功: {text}")
            # BUG FIX: give the server a moment to come up before re-polling.
            # Without this, a start that keeps failing spawned redis-server
            # in a tight loop with no delay.
            time.sleep(5)
        except Exception as e:
            logger.error(f"check_redis error: {e}")
            time.sleep(10)


if __name__ == '__main__':
    # xhs4 redis
    # Keep a Redis supervisor running in the background, give it a head
    # start, then connect.  NOTE(review): the thread is non-daemon, so it
    # will keep the process alive at shutdown -- confirm this is intended.
    threading.Thread(target=check_redis).start()
    time.sleep(5)
    redis_client = Redis("localhost", 6379, decode_responses=True)

# (manufacturer, brand, model list) triples used by
# get_facebook_fake_useragent() to fabricate a plausible Android device
# identity inside the Facebook-app user-agent string.
prop = [
    ("samsung", "samsung", ["SM-G991B", "SM-G973F", "SM-A515F", "SM-N9860", "SM-M526B"]),
    ("xiaomi", "xiaomi", ["MI 10", "MI 11", "Redmi Note 10", "Redmi K40", "POCO X3"]),
    ("huawei", "huawei", ["HUAWEI P30", "Mate 20", "P40 Pro", "Nova 7", "HONOR 9X"]),
    ("oppo", "oppo", ["CPH2005", "Reno 4", "Find X2", "A92", "F17"]),
    ("vivo", "vivo", ["V20", "X60", "Y20", "V2050", "S1 Pro"]),
    ("realme", "realme", ["RMX3085", "realme 7", "realme 8 Pro", "realme GT"]),
    ("oneplus", "oneplus", ["ONEPLUS A6010", "ONEPLUS A5000", "ONEPLUS Nord", "ONEPLUS 9"]),
    ("google", "google", ["Pixel 4", "Pixel 5", "Pixel 6", "Pixel 7"]),
    ("sony", "sony", ["Xperia 1", "Xperia XZ2", "Xperia 10 II"]),
    ("asus", "asus", ["ROG Phone 5", "Zenfone 8", "ASUS_I005DA"]),
    ("lenovo", "lenovo", ["Lenovo TB-8505F", "Lenovo K10", "Lenovo A6 Note"]),
    ("motorola", "motorola", ["moto g(7)", "moto g power", "moto e(6)", "edge 20"]),
    ("meizu", "meizu", ["Meizu 16th", "Meizu 17 Pro", "Meizu M6"]),
    ("honor", "honor", ["HONOR 20", "HONOR View 30", "HONOR X10"]),
    ("zte", "zte", ["ZTE Axon 10 Pro", "Blade V10", "Nubia Z20"]),
    ("blackshark", "blackshark", ["Shark KLE-A0", "Shark PRS-A0"]),
    ("redmagic", "nubia", ["RedMagic 6", "RedMagic 6R", "RedMagic 7"]),
    ("nokia", "nokia", ["Nokia 7.2", "Nokia 6.1", "Nokia X10"]),
    ("sharp", "sharp", ["SHV45", "SHARP S2"]),
    ("tcl", "tcl", ["TCL 10L", "TCL 20 Pro", "TCL Plex"])
]
# Site identifier reported to the lotus job/ack API.
lotus_site_name = 'facebook_honor'


def ack_job_status(job_id, state, msg=''):
    """Report a job's terminal state to the lotus API and clear its watchdog key.

    Retries up to 5 times.  Returns True on a confirmed (HTTP 200) ack,
    False otherwise.
    """
    data = {
        'end_time': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        'job_id': job_id,
        'state': state,
        'msg': msg
    }
    for i in range(5):
        try:
            r = requests.post(f'https://api-crawler.datatell.com.cn/lotus/common/log?site_name={lotus_site_name}',
                              data=data,
                              timeout=30)
            # BUG FIX: the original printed r.json(), which raises on a
            # non-JSON body and (via the outer except) aborted all retries.
            print(r.status_code, r.text)
            if r.status_code == 200:
                # Ack accepted: the job is finished, drop its watchdog key.
                redis_client.delete(f"fb:job:{job_id}")
                return True
        except Exception as e:
            # BUG FIX: catch per attempt so one failed POST does not
            # cancel the remaining retries.
            print('Ack task finish exception:', e)
        time.sleep(1)  # brief pause so retries are not back-to-back
    logger.error(f"Ack task finish failed. job_id:{job_id}, state:{state}, msg:{msg}")
    return False


def check_job():
    """Watchdog loop: fail any tracked job whose start stamp is over 30 min old."""
    logger.info("check_job start")
    while True:
        try:
            for job_key in redis_client.keys("fb:job:*"):
                started = redis_client.get(job_key)
                if not started:
                    continue
                # Value is the epoch second the job started at.
                if int(time.time()) - int(started) > 1800:
                    logger.error(f"Job:{job_key} timeout")
                    ack_job_status(job_key.split(":")[-1], state='fail', msg='timeout')
        except Exception as e:
            logger.error(f"Ack task finish exception: {e}")
        time.sleep(10)


def get_facebook_fake_useragent():
    """Build a Facebook-app (FBAN) user-agent string with a random device identity."""
    manufacturer, brand, models = random.choice(prop)
    model = random.choice(models)
    template = (
        '[FBAN/FB4A;FBAV/500.0.0.57.50;FBBV/696636259;'
        'FBDM/{density=3.0,width=1080,height=1920};FBLC/en_US;FBRV/698879235;'
        'FBCR/;FBMF/%s;FBBD/%s;FBPN/com.facebook.katana;FBDV/%s;FBSV/9;'
        'FBOP/1;FBCA/armeabi-v7a:armeabi;]'
    )
    return template % (manufacturer, brand, model)


def log(*args):
    """Join all positional args with spaces and emit at INFO level; never raises."""
    if not args:
        return
    try:
        logger.info(" ".join(map(str, args)))
    except Exception as e:
        logger.error("log error:" + str(e))


def upload_data(data: Union[str, dict, list], data_type, topic='social_facebook_post_v2', job_id=None):
    """Push crawled records to the kafka write endpoint, retrying up to 5 times.

    ``data`` may be a JSON string, a single record dict, or a list of records;
    it is normalised to a list first.  On a confirmed upload the job is acked,
    its cached seed is dropped and the daily upload counter is incremented.
    """
    if topic == 'social_facebook_post' or topic == "" or topic is None:
        topic = 'social_facebook_post_v2'
    # data_type: post or comment
    # Drop the cached seed for this job now that data was produced.
    redis_client.delete(f"fb:seeds_cache:{job_id}")
    if isinstance(data, str):
        data = json.loads(data)
    if isinstance(data, dict):
        data = [data]
    if not data:
        Log.warning('data is empty')
        return
    data_wrapped = {
        'data': data,
        'data_type': data_type,
        'topic': topic,
        'job_id': job_id
    }
    for i in range(5):
        if i > 0:
            Log.info(f"第{i}次重试上传数据 job_id:{job_id}")
        try:
            r = requests.post('https://crawl.datatell.com.cn/fb/write_kafka', json=data_wrapped, timeout=20,
                              verify=False)
            if r.status_code != 200:
                # BUG FIX: the original raised out of the loop here, so the
                # advertised 5-attempt retry never actually retried -- any
                # single non-200 response aborted the upload.  Now a failed
                # attempt is logged and the next attempt runs.
                raise Exception(r.status_code, r.text)
            date = datetime.now().strftime('%Y-%m-%d')
            redis_client.incr(f"fb:upload_count:honor:{date}")
            print("seeds count add success!")
            if job_id:
                if ack_job_status(job_id, 'finish', 'success'):
                    logger.info(f"Update ack_job_status success. job_id:{job_id}")
            Log.success(
                f"Write kafka topic:', {topic}, {r.status_code}, {r.text}, 'data type:', {data_type}, 'len:', {len(data)}")
            return
        except Exception as e:
            Log.error('Upload data failed. ' + str(e))


def get_response_from_shell(d: Device, curl, devices="", account_id=""):
    """Run a curl command on the device via adb shell and return its raw output.

    Retries up to 5 times on empty output or the proxy's "API failed"
    sentinel.  A checkpoint / invalidated-session response flags the account
    in Redis.  Returns "" when no usable output could be obtained.
    """
    for i in range(5):
        if i > 0:
            logger.debug(f"{d.serial}: 第{i}次请求 重试")
        # BUG FIX: in the original, this shell call (and everything below it)
        # was nested under `if i > 0`, so the first loop iteration was a
        # complete no-op and only retries 1-4 ever executed the request.
        output = d.shell(curl).output
        if not output:
            continue
        if "API失败，请检查网络连接或参数配置" in output:
            logger.warning(f"{d.serial} API失败，请检查网络连接或参数配置 continue")
            continue
        try:
            data = json.loads(output.strip())
            # Session invalidated (logged out / password changed) marker.
            check_login = "Error validating access token: The session has been invalidated because the user changed their password or Facebook has changed the session for security"
            # Account requires manual verification (checkpoint).
            if data.get("headers", {}).get("X-Fb-Integrity-Required") == "checkpoint" or check_login in output:
                logger.warning(f"{d.serial} get_response_from_shell need checkpoint")
                redis_client.set(f"fb:account_checkpoint:{d.serial}:{account_id}", "1")
            return output
        except Exception as e:
            logger.error(f"{d.serial} get_response_from_shell error: {e} \n output: {output}")
            return ""
    return ""


def run_user_timeline(server_ip, seed, authorization, ua, device=None, from_browser=False, account_id=""):
    """Crawl a user's timeline posts page by page and upload them.

    Stops after ``page_num`` pages (when set) or once the last post on a page
    is older than ``time_ago`` seconds (default 2 days).  Failed fetches push
    the seed back onto ``seed_queue``; an unresolvable user is retried once
    through the browser-redirect uid lookup, then the job is failed.
    """
    try:
        d = device
        info = ""
        PROXY_URL = f"http://{server_ip}:8080/api/facebook-graphql"

        nodeId = seed['seed']
        # Prefer the numeric id previously resolved for this seed URL.
        if redis_client.hget("fb:url2fid", seed['seed']):
            nodeId = redis_client.hget("fb:url2fid", seed['seed']).split(",")[0]
            logger.info(f"{d.serial} Get nodeId from redis: {nodeId}")
        else:
            logger.info(f"{d.serial} Get nodeId from seed: {nodeId}")
        page_size = 50
        page_num = None
        if seed.get("time_ago"):
            break_time = int(time.time()) - seed["time_ago"]
        else:
            break_time = int(time.time()) - 3600 * 24 * 2  # default: 2 days back
        if seed.get("page_num") is not None:
            page_num = int(seed.get("page_num"))
        end_cursor = None
        page = 0
        while True:
            page += 1
            res = get_user_timeline_post(authorization, ua, nodeId, PROXY_URL, page_size, end_cursor, get_curl=False,
                                         account_id=account_id)
            logger.info(f"{d.serial}: Get user timeline post page:{page}")
            if res is None:
                seed_queue.put(seed)
                Log.error(
                    f"{d.serial} Get user timeline post page:{page} res is None,put to seed_queue: success, seed : {seed}")
                return
            # Too much data requested -- shrink the page size and retry once.
            if "Please reduce the amount of data you're asking for, then retry your request" in res:
                page_size = 10
                res = get_user_timeline_post(authorization, ua, nodeId, PROXY_URL, page_size, end_cursor,
                                             get_curl=False, account_id=account_id)
            if res:
                info = json.loads(res)
                info = json.loads(info.get("body")) if info.get("body") else info
                # Response OK but carries no data.
                try:
                    if info.get("body") == "":
                        seed_queue.put(seed)
                        Log.error(f"{d.serial} body is empty  put to seed_queue: success, res {res} ")
                        return
                    if info.get("data").get("fetch__User") is None:
                        Log.debug(f'{d.serial} user not found:{nodeId}===> body:{info}')
                        # One shot: use the browser redirect to resolve a fresh uid.
                        if not from_browser:
                            response_text = get_response_from_browser(f'https://www.facebook.com/{nodeId}',
                                                                      devices=d.serial,
                                                                      cookies=cookies_pool.get(account_id))
                            if response_text:
                                nodeId = re.search(r',"selectedID":"(\d+)"', response_text)
                                if nodeId:
                                    nodeId = nodeId.group(1)
                                    redis_client.hset("fb:url2fid", mapping={seed.get("seed"): nodeId + ","})
                                    seed['seed'] = nodeId
                                    Log.success(f'{d.serial} Get new uid:{nodeId} from browser')
                                    run_user_timeline(server_ip, seed, authorization, ua=ua, device=d,
                                                      from_browser=True, account_id=account_id)
                                    return
                                else:
                                    Log.warning(f"{d.serial} not found selectedID from_browser ")
                        else:
                            ack_job_status(seed.get('job_id'), 'fail', json.dumps({"info": info, "seed": seed}))
                            # Drop the cached seed for this job.
                            redis_client.delete(f"fb:seeds_cache:{seed.get('job_id')}")
                except Exception as e:
                    Log.error(f'{d.serial} run_user_timeline try get fetch__User error:{e}  body:{info}')

                edges = info["data"]["fetch__User"]["timeline_feed_units"]["edges"]
                posts = []
                for edge in edges:
                    try:
                        component = edge['node']['__typename']
                        if 'Story' in component:
                            p = solve_post_info(edge['node'])
                            if p is None:
                                continue
                            logger.debug(
                                f"{d.serial} get post_id:{p.get('post_id')} post_url:{p.get('post_url')} date:{datetime.fromtimestamp(p.get('publish_timestamp') / 1000).strftime('%Y-%m-%d %H:%M:%S')}")
                            posts.append(p)
                    except Exception as e:
                        Log.error(f"{d.serial} for solve_post_info error: ", e)
                        continue
                topic = seed.get('topic') or 'social_facebook_post'
                upload_data(posts, data_type="facebook_post", topic=topic, job_id=seed.get('job_id'))
                Log.info(f"{d.serial} Get {len(posts)} posts from seed:{seed}")
                if page_num:
                    if page >= page_num:
                        logger.warning(f"{d.serial} run_user_timeline break_time: {break_time}")
                        break
                # BUG FIX: guard posts[-1] -- an empty page used to raise
                # IndexError here and fail the whole job via the outer except.
                elif posts and posts[-1]['publish_timestamp'] < break_time * 1000:
                    logger.warning(f"{d.serial} run_user_timeline break_time: {break_time}")
                    break
                try:
                    page_info = info["data"]["fetch__User"]["timeline_feed_units"]["page_info"]
                    if page_info.get("has_next_page"):
                        end_cursor = page_info.get('end_cursor')
                        logger.info(f"{d.serial} Get next page:{end_cursor}")
                except Exception as e:
                    # BUG FIX: the original bare `except:` logged `{e}` with `e`
                    # unbound in that scope, raising NameError inside the handler.
                    Log.error(f'{d.serial} run_user_timeline page info error:{e}')
            else:
                seed_queue.put(seed)
                Log.warning(
                    f"{d.serial} Get user timeline post page:{page + 1} res is empty: {info} put to seed_queue: success")
                return
            if page > 0:
                time.sleep(10)
    except Exception as e:
        if type(info) == dict:
            if info.get("data", {}).get("fetch__User") is None:
                # The user may no longer exist (banned/deleted).
                ack_job_status(seed.get('job_id'), 'fail', json.dumps({"info": info, "seed": seed}))
                # Drop the cached seed for this job.
                redis_client.delete(f"fb:seeds_cache:{seed.get('job_id')}")
                return
        ack_job_status(seed.get('job_id'), 'fail', json.dumps({"error": str(e), "seed": seed}))
        # Drop the cached seed for this job.
        redis_client.delete(f"fb:seeds_cache:{seed.get('job_id')}")
        Log.error(
            f'{d.serial} Get user timeline error: {e} seed: {seed} body:{info}')


def run_group_timeline(server_ip, seed, authorization, ua, device=None, account_id="", from_browser=False):
    """Crawl a group's feed posts page by page and upload them.

    Stops after 200 pages, after ``page_num`` pages (when set), or once the
    last post on a page is older than ``time_ago`` seconds (default 3 days).
    Failed fetches push the seed back onto ``seed_queue``.
    """
    try:
        d = device
        page_num = None
        PROXY_URL = f"http://{server_ip}:8080/api/facebook-graphql"
        groud_id = seed['seed']
        # Prefer the numeric id previously resolved for this seed URL.
        if redis_client.hget("fb:url2fid", seed['seed']):
            groud_id = redis_client.hget("fb:url2fid", seed['seed']).split(",")[0]
            logger.info(f"{d.serial} Get groud_id from redis: {groud_id}")
        else:
            logger.info(f"{d.serial} Get groud_id from seed: {groud_id}")
        page_size = seed.get('page_size', 50)
        end_cursor = None
        info = ""
        page = 0
        if seed.get("time_ago"):
            break_time = int(time.time()) - seed["time_ago"]
        else:
            break_time = int(time.time()) - 259200  # default: 3 days back
        if seed.get("page_num") is not None:
            page_num = int(seed.get("page_num"))
        while True:
            page += 1
            if page > 200:
                logger.warning(f"{d.serial} run_group_timeline page > 200 break")
                return
            res = get_group_timeline_post(authorization, ua, groud_id, PROXY_URL, page_size, end_cursor,
                                          devices=d.serial,
                                          get_curl=False, account_id=account_id)
            if res is None:
                seed_queue.put(seed)
                Log.error(f"{d.serial} Get group timeline res is None put to seed_queue: success seed : {seed}")
                return
            # Too much data requested -- shrink the page size and retry once.
            if "Please reduce the amount of data you're asking for, then retry your request" in res:
                page_size = 10
                res = get_group_timeline_post(authorization, ua, groud_id, PROXY_URL, page_size, end_cursor,
                                              devices=d.serial,
                                              get_curl=False, account_id=account_id)
            if res:
                info = json.loads(res)
                info = json.loads(info.get("body")) if info.get("body") else info
                # Response OK but carries no data.
                try:
                    if info.get("body") == "" or info.get("data", {}).get("group_address") is None:
                        Log.debug(f'group not found:{groud_id}===> body:{info}')
                        redis_client.delete(f"fb:seeds_cache:{seed.get('job_id')}")
                        if not from_browser:
                            # One shot: use the browser redirect to resolve a fresh group id.
                            response = get_response_from_browser(f'https://www.facebook.com/{groud_id}',
                                                                 cookies=cookies_pool.get(account_id))
                            if response:
                                # BUG FIX: get_response_from_browser's return is
                                # searched directly as text elsewhere in this file
                                # (see run_user_timeline), so the original
                                # `response.text` raised AttributeError and the
                                # browser fallback never worked -- TODO confirm
                                # the helper's return type.
                                groupID = re.search(r'"groupID":"(\d+)",', response)
                                if groupID:
                                    groupID = groupID.group(1)
                                    redis_client.hset("fb:url2fid", mapping={seed.get("seed"): groupID + ","})
                                    seed['seed'] = groupID
                                    Log.success(f'Get new uid:{groupID} from browser')

                                    run_group_timeline(server_ip, seed, authorization, from_browser=True, ua=ua,
                                                       device=device, account_id=account_id)
                                    return
                except Exception as e:
                    Log.error(f'{d.serial} run_group_timeline c error:{e}  body:{info} seed: {seed}')
                edges = info["data"]["group_address"]["group_feed"]["edges"]
                posts = []
                for edge in edges:
                    try:
                        p = solve_post_info(edge['node'])
                        if p is None:
                            continue
                        logger.debug(
                            f"{d.serial} group_id:{groud_id} get post_id:{p.get('post_id')} post_url:{p.get('post_url')} date:{datetime.fromtimestamp(p.get('publish_timestamp') / 1000).strftime('%Y-%m-%d %H:%M:%S')}")
                        posts.append(p)
                    except Exception as e:
                        Log.error(f"{d.serial} run_group_timeline for solve_post_info error: {e}")
                        continue
                topic = seed.get('topic') or 'social_facebook_post'
                Log.info(f"{d.serial} Get {len(posts)} posts from seed:{seed}")
                upload_data(posts, data_type="facebook_post", topic=topic, job_id=seed.get('job_id'))
                if page_num:
                    if page > page_num:
                        logger.warning(f"{device} run_group_timeline break_time: {break_time}")
                        break
                # BUG FIX: guard posts[-1] -- an empty page used to raise
                # IndexError here and fail the whole job via the outer except.
                elif posts and posts[-1]['publish_timestamp'] < break_time * 1000:
                    logger.warning(f"{device} run_group_timeline break_time: {break_time}")
                    break
                try:
                    # Pagination cursor for the next request.
                    page_info = info["data"]["group_address"]['group_feed']["page_info"]
                    if page_info['has_next_page']:
                        end_cursor = page_info['end_cursor']
                except Exception as e:
                    Log.error(f'{d.serial} run_group_timeline page info error:{e}')
            else:
                seed_queue.put(seed)
                Log.error(f"{d.serial} body is empty  put to seed_queue: success, res {res} ")
                return
    except Exception as e:
        if type(info) == dict:
            if info.get("data", {}).get("group_feed") is None:
                # The group may no longer exist (banned/deleted).
                ack_job_status(seed.get('job_id'), 'fail', json.dumps({"info": info, "seed": seed}))
                # Drop the cached seed for this job.
                redis_client.delete(f"fb:seeds_cache:{seed.get('job_id')}")
                return

        ack_job_status(seed.get('job_id'), 'fail', json.dumps({"error": str(e), "seed": seed}))
        # Drop the cached seed for this job.
        redis_client.delete(f"fb:seeds_cache:{seed.get('job_id')}")
        Log.error(
            f'{d.serial} Get group timeline error: {e} seed: {seed} info {info}')


def run_profile(server_ip, seed, authorization, ua, device=None, from_browser=False, devices="", account_id=""):
    """Fetch a single user's profile and upload it as a facebook_user record.

    NOTE(review): the ``ua`` parameter is immediately overwritten with a
    freshly generated fake user-agent below -- confirm this is intentional.
    """
    try:
        d = device

        PROXY_URL = f"http://{server_ip}:8080/api/facebook-graphql"
        nodeId = seed['seed']
        info = ""
        ua = get_facebook_fake_useragent()
        res = get_user_profile(authorization, ua, nodeId, PROXY_URL, get_curl=False, account_id=account_id)
        if res != None:
            # body may contain several newline-separated JSON documents; only
            # the first line is parsed, and info ends up as the "user" node.
            info = json.loads(res).get("body")
            info = json.loads(info.split("\n")[0]).get("data", {}).get("user")
            profile = solve_user_info(info)
            if profile:
                topic = seed.get('topic') or "social_facebook_post"
                upload_data(profile, data_type="facebook_user", topic=topic, job_id=seed.get('job_id'))
            else:
                ack_job_status(seed.get('job_id'), 'fail', json.dumps({"error": "no data"}))
        else:
            # No response at all: requeue the seed for a later retry.
            seed_queue.put(seed)
            Log.error(f"{d.serial} body is empty  put to seed_queue: success, seed {seed} ")
    except Exception as e:
        if type(info) == dict:
            # NOTE(review): by this point ``info`` already holds the "user"
            # node itself, so info.get("data", {}).get("user") is None for
            # ANY dict -- this branch fires for every dict-shaped info, not
            # only for missing users.  Verify the intended condition.
            if info.get("data", {}).get("user") is None:
                # The user may no longer exist (banned/deleted).
                ack_job_status(seed.get('job_id'), 'fail', json.dumps(info))
                return
        ack_job_status(seed.get('job_id'), 'fail', json.dumps({"error": str(e)}))
        # Drop the cached seed for this job.
        redis_client.delete(f"fb:seeds_cache:{seed.get('job_id')}")
        Log.error(f'{d.serial}run_profile error: {e}')


def get_Id_from_url(url, device, account_id):
    """Resolve a Facebook URL to a numeric user/group id.

    Resolution order: Redis cache ("fb:url2fid"), an 8-17 digit run embedded
    in the URL, then a browser fetch scraping selectedID/groupID from the
    page source (cached on success).  Falls back to returning the
    trailing-slash-stripped URL itself.
    """
    try:
        d = device

        url = url[:-1] if url.endswith('/') else url
        uid_info = redis_client.hget("fb:url2fid", url)
        if uid_info:
            u = uid_info.split(",")[0]
            logger.success(f"{d.serial} url {url} get id from redis id: {u}")
            return u
        # BUG FIX: regex patterns are raw strings now -- "\d" in a plain
        # string is an invalid escape sequence (SyntaxWarning on CPython 3.12+).
        uid = re.search(r"\d{8,17}", url)
        if uid:
            return uid.group(0)
        else:
            response = get_response_from_browser(url, devices=d.serial, cookies=cookies_pool.get(account_id))

            if response:
                try:
                    response_text = json.loads(response).get("body")
                    uid = re.search(r'"selectedID":"(\d+)",', response_text)
                    if not uid:
                        uid = re.search(r'"groupID":"(\d+)"', response_text)
                    if uid:
                        print(f'Get userId from url success: {uid.group(1)} url: {url}')
                        # Cache the resolved id for future lookups.
                        redis_client.hset("fb:url2fid", mapping={url: uid.group(1) + ","})
                        return uid.group(1)
                except Exception as e:
                    logger.error(f'{d.serial} Get userId from url error: {e}')
        return url
    except Exception as e:
        logger.error(f'{d.serial} Get userId from url error: {e}')
    return url


def run_post(server_ip, seed, authorization, ua, device=None, account_id=""):
    """Fetch one post by URL, parse it and upload it as a facebook_post record."""
    PROXY_URL = f"http://{server_ip}:8080/api/facebook-graphql"
    post_url = seed['seed']
    res = get_post(authorization, ua, post_url, PROXY_URL, devices=device, get_curl=False, account_id=account_id)
    if not res:
        ack_job_status(seed.get('job_id'), 'fail', json.dumps({"error": "no data"}))
        return
    try:
        body = json.loads(json.loads(res).get("body", "{}"))
        if not body:
            return
        # The post payload lives under data.video_home_deeplink.
        post = solve_post_info(body.get("data", {}).get('video_home_deeplink'))
        if not post:
            return
        topic = seed.get('topic') or "social_facebook_post"
        logger.success(f"get post success: {post}")
        logger.debug(
            f"get post_id:{post.get('post_id')} post_url:{post.get('post_url')} date:{datetime.fromtimestamp(post.get('publish_timestamp') / 1000).strftime('%Y-%m-%d %H:%M:%S')}")
        upload_data(post, data_type="facebook_post", topic=topic, job_id=seed.get('job_id'))
    except Exception as e:
        ack_job_status(seed.get('job_id'), 'fail', json.dumps({"error": str(e), "seed": seed}))
        logger.error(f"{device} run_post error: {e} {res}")
        return {}


def get_seeds(d, account_id=""):
    """Pull the next job from the lotus API and normalise it into a seed dict.

    Only user/page/group timelines, single posts and raw URLs are handled;
    URL seeds are resolved to numeric ids and re-typed where possible.
    Returns the job dict, or None when nothing usable is available.
    """
    try:
        r = requests.get(f'https://api-crawler.datatell.com.cn/lotus/common/job?site_name={lotus_site_name}',
                         timeout=30)
        if r.status_code != 200:
            return None
        seed_info = json.loads(r.text)
        if seed_info['code'] != 200:
            return None
        job = seed_info['job']
        kind = (job.get('type'), job.get('name'))
        # Timelines and single posts pass straight through.
        if kind in (('user', 'timeline'), ('page', 'timeline'),
                    ('group', 'timeline'), ('post', 'post')):
            return job
        if job.get('type') == 'url':
            if job["seed"].isdigit():
                return job
            job["seed"] = job["seed"].replace("u003d", "=").replace("\\u003d", "=")
            uid = get_Id_from_url(job.get('seed'), d, account_id)
            if uid.isdigit():
                # Re-classify by URL shape once we know the id resolved.
                if "/groups/" in job["seed"]:
                    job["type"] = "group"
                if "/profile" in job["seed"]:
                    job["type"] = "profile"
            Log.success(f'{d.serial} get_userId_from_url success :{uid}')
            Log.info(f"{d.serial} origin seed: {job['seed']}")
            job['seed'] = uid
            return job
        Log.error(f"{d.serial} other type job: {job}")
        return None
    except Exception as e:
        Log.error(f'{d.serial} Get seeds error: {e}')
        return None


def do_watch_video(d: Device, user_id):
    """Run the 'watch_video' script on the device for ~10s, then stop and join it."""
    fb = Facebook(0)
    stop_flag = StopFlag()
    script_thread = fb.loop_async(d, 'watch_video', {}, 90, stop_flag)
    time.sleep(10)
    log(f"{d.serial} watch video done")
    # Signal the script loop to stop, then wait for it to wind down.
    stop_flag.stopped = True
    script_thread.join()


def get_account_info(d: Device, user=0):
    """Read the logged-in Facebook account id, token, cookies and device IP.

    Parses the katana authentication prefs file via a root adb shell.
    Returns ``(account_id, account_info_dict)``; ``account_id`` may be ""
    when the account is logged out and no id could be recovered, and the
    result is ``(None, None)`` on any unexpected error.
    """
    try:
        token_path = f"/data/user/{user}/com.facebook.katana/app_light_prefs/com.facebook.katana/authentication"
        data = d.shell(f"cat {token_path}", user="root").output
        # BUG FIX: raw strings for all regex patterns -- "\d" / "\[" in plain
        # strings are invalid escape sequences (SyntaxWarning on CPython 3.12+).
        account = re.search(r'c_user","value":"(\d+)"', data)
        token = re.search(r"access_token.*?(E\w+)", data)
        cookies = re.search(r"session_cookies_string.*?(\[.*?])", data)
        account = account.group(1) if account else ""
        token = token.group(1) if token else ""
        cookies = cookies.group(1) if cookies else ""
        try:
            ip_info = d.shell("ifconfig wlan0", user="root").output
            ip = re.search(r" inet addr:(\d+\.\d+\.\d+\.\d+)", ip_info).group(1)
        except Exception:
            ip = ""

        account_info = {
            "user": user,
            "device": d.serial,
            "authorization": token,
            "network_ip": ip,
            "cookies_info": cookies,
        }
        if account:
            return account, account_info
        # Probably logged out: try to recover the account id from the pref
        # file names instead.  (BUG FIX: the original ended with an
        # unreachable `else: return None, None` -- `if account` and
        # `if not account` already covered both cases.)
        file_info = d.shell(f"ls /data/user/{user}/com.facebook.katana/app_light_prefs/com.facebook.katana",
                            user="root").output
        account = re.search(r'(\d{14,16})', file_info)
        account = account.group(1) if account else ""
        return account, account_info
    except Exception as e:
        print('Get token error:', e)
        return None, None


def get_wm_size(d: Device):
    """Return the device screen resolution as (width, height).

    Falls back to 1080x1920 when the `wm size` output cannot be parsed.
    """
    try:
        raw = d.shell("wm size").output.strip()
        width, height = map(int, re.search(r'(\d+)x(\d+)', raw).groups())
        return width, height
    except Exception as e:
        logger.error(f" {d.serial} Error getting wm size: {e}")
    return 1080, 1920


def exception(d: Device, stop_flag):
    """Background popup dismisser: click 'Not now' / 'Skip' until stop_flag is truthy."""
    logger.warning(f"{d.serial} exception start...")
    while not stop_flag:
        current = d.get_page()
        for label in ('Not now', 'Skip'):
            if label in current:
                d.click(label)
                log(d.serial, f'Waiting {label}')
                time.sleep(3)
        time.sleep(2)

    log(d.serial, 'exception Waiting Finish')


def check_account(d: Device, stop_flag):
    """Sweep every Android user on the device and classify the Facebook
    account logged in under it.

    For each user this configures networking, makes sure the user is
    started, cold-restarts the Facebook app, clicks through "Continue" /
    "Log in" interstitials, and records the account state in Redis:

      * logged out            -> fb:log_out:<serial> (attempts a re-login)
      * needs verification    -> fb:verify:<serial>
      * automation popup      -> dismissed, timestamp in fb:automated:<serial>
      * healthy (feed/search) -> fb:online:<serial> with status=1

    A background `exception` thread dismisses generic popups while the
    sweep runs.  Returns True when all users have been processed.
    """
    logger.warning(f"{d.serial} Checking account......")
    # page = d.get_page()
    user_list_info = d.shell("pm list users", user="root").output
    user_list = re.findall("(UserInfo\{\d+:.*?)\r?\n?", user_list_info)
    threading.Thread(target=exception, args=(d, stop_flag)).start()
    for user_info in user_list:

        user_id = int(re.search("UserInfo\{(\d+):", user_info).group(1))
        setting_network(d, user_id)
        # "running" in the pm output means this Android user is already started.
        user_state = 1 if "running" in user_info else 0
        if user_state == 0:
            d.shell(f"am start-user {user_id}")
        account, account_info = get_account_info(d, user_id)
        if account:

            retry_time = 0
            start_time = time.time()
            # Cold-restart the app so we observe its real login state.
            d.shell(f"am force-stop --user {user_id} com.facebook.katana")
            time.sleep(2)
            d.shell(f"am start --user {user_id} -n com.facebook.katana/com.facebook.katana.activity.FbMainTabActivity")
            time.sleep(5)

            # The app may first ask to click "Log in" or "Continue".
            while 'Continue' in d.get_page():
                if time.time() - start_time > 60:
                    break
                d.click('Continue')
                log(d.serial, 'Waiting Continue')
                # Give up after 5 attempts.
                if retry_time > 5:
                    break
                time.sleep(5)
                # SMS/code verification required.
                if "Confirm your account" in d.get_page():
                    log(d.serial, 'Need to send code')
                    # Remove the account from the online set.
                    if redis_client.hexists(f"fb:online:{d.serial}", account):
                        redis_client.hdel(f"fb:online:{d.serial}", account)
                    redis_client.hset(f"fb:verify:{d.serial}", mapping={account: json.dumps(account_info)})
                    continue
                retry_time += 1

            retry_time = 0
            while 'Log in' in d.get_page():
                if time.time() - start_time > 60:
                    break
                d.click('Log in')
                log(d.serial, 'Waiting Log in')
                if retry_time > 2:
                    break
                time.sleep(5)
                retry_time += 1

            if "Enter your password to continue" in d.get_page():
                d.back()

            # Account is logged out: try to log back in with the stored password.
            # (An unreachable `if account is None` guard that sat inside this
            # `if account:` branch has been removed.)
            if "Password" in d.get_page() or "Mobile number or email" in d.get_page():
                password = redis_client.hget(f"fb:accounts:{account}", "password")
                if password:
                    if "Mobile number or email" not in d.get_page():
                        d.click("Password")
                        d.shell(f"input text {password}", user="root")
                        d.click("Log in")
                if redis_client.hexists(f"fb:online:{d.serial}", account):
                    redis_client.hdel(f"fb:online:{d.serial}", account)
                    logger.warning(f"{d.serial} Account {account} log out!!!")
                redis_client.hset(f"fb:log_out:{d.serial}", mapping={account: json.dumps(account_info)})

            # "We suspect automated behavior" popup: click its confirm button.
            if "We suspect automated behavior on your account" in d.get_page() or "我们怀疑您的账户" in d.get_page():
                device_x, device_y = get_wm_size(d)
                # Fix: was logger.warning(d.serial, "...") — loguru would treat
                # the serial as a format string and the message as its argument.
                logger.warning(f"{d.serial} 自动化弹窗出现")
                if device_y > 1920:
                    bounds_y = re.findall('bounds="\[\d+,\d+]\[\d+,(\d+)]"', d.get_page())
                    if bounds_y:
                        bounds_y = min(int(x) for x in bounds_y)
                    else:
                        # Fix: an empty findall() left bounds_y as [] and the
                        # subtraction below raised TypeError; fall back to the
                        # bottom of the screen instead.
                        bounds_y = device_y
                    bound_x, bound_y = int(device_x * random.uniform(0.2, 0.8)), int(
                        bounds_y - random.randint(100, 150))
                else:
                    bound_x, bound_y = int(device_x * random.uniform(0.2, 0.8)), int(
                        device_y * random.uniform(0.289, 0.339))
                log(d.serial, f"点击弹窗 坐标:{bound_x},{bound_y}")
                d.click_xy(bound_x, bound_y)
                nowtime = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                redis_client.lpush(f"fb:automated:{d.serial}", nowtime)
                time.sleep(3)

            # Healthy account: the main feed with a search entry is visible.
            page = d.get_page()
            if "search" in page or "Search" in page or "搜索" in page:
                # Fix: log() is called as log(serial, message) everywhere else;
                # the single-argument call here would not match that signature.
                log(d.serial, f"user {user_id}  Account is OK !!!")
                if not account_info is None and account_info.get("authorization"):
                    # Refresh the stored online-account record.
                    account_info["status"] = 1
                    redis_client.set(f"fb:account_checkpoint:{account}", "0")
                    redis_client.hset(f"fb:online:{d.serial}", mapping={account: json.dumps(account_info)})
                    continue

            # Captcha / video-selfie verification required.
            if "Enter the text from the image" in d.get_page() or "Record a video of yourself" in d.get_page():
                if redis_client.hexists(f"fb:online:{d.serial}", account):
                    redis_client.hdel(f"fb:online:{d.serial}", account)
                redis_client.hset(f"fb:verify:{d.serial}", mapping={account: json.dumps(account_info)})
                account_info["status"] = 2
                # return False
        else:
            logger.warning(f"{d.serial} user {user_id} No Account info")
    return True


# Keep an account looking active.
def active_account(d: Device, user_id):
    """Warm up the account under the given Android user by watching videos."""
    logger.warning(f"{d.serial} Active account......")
    # do_search(d, "honor")
    do_watch_video(d, user_id)


def group_worker(d: Device, account_info, group_id):
    """Crawl one group's timeline through the local go-server, using this
    account's authorization token and a fake Facebook user agent."""
    seed = {"seed": group_id, "page_size": 50}
    run_group_timeline(
        "127.0.0.1",
        seed,
        account_info.get("authorization"),
        ua=get_facebook_fake_useragent(),
        device=d,
    )


def get_cache_seed_from_redis():
    """Fetch one cached seed (saved before a crawl started) from Redis.

    Returns the seed hash as a dict, or None when no cache entries exist.
    NOTE(review): KEYS scans the whole keyspace and can block Redis —
    consider SCAN if the cache can grow large.
    """
    cached_keys = redis_client.keys("fb:seeds_cache:*")
    if not cached_keys:
        return None
    return redis_client.hgetall(cached_keys.pop())


def parse_cookie_info(cookie_list):
    """Flatten a browser-style cookie list ([{"name": ..., "value": ...}, ...])
    into a name -> value dict.

    The "xs" cookie value is URL-quoted because it contains characters
    (":", "|") that must be escaped before being sent in a Cookie header.
    On any error the partially built dict is returned after logging.
    """
    cookies = {}
    try:
        for item in cookie_list:
            name, value = item.get("name"), item.get("value")
            cookies[name] = quote(value) if name == "xs" else value
        return cookies
    except Exception as e:
        logger.warning(f"{cookie_list} Cookie info error: {e}")
        return cookies


def todo(d: Device, stop_flag):
    """Run one scheduling pass for this device.

    Verifies the network, applies the per-group quiet hours, then for every
    online account on the device pulls a seed (from the in-process queue or
    the remote scheduler), caches it in Redis, and dispatches it through
    run_task().  Returns 1 when the caller should treat the pass as ended
    early (sleep / reboot / error), otherwise None.
    """
    seed = {}
    try:
        hour = datetime.today().hour
        if setting_network(d):
            # Network recovered: clear the outage marker if one was set.
            if redis_client.exists(f"fb:no_network:{d.serial}"):
                redis_client.delete(f"fb:no_network:{d.serial}")
            # if check_account(d, stop_flag):
            # The per-device group note decides which quiet-hours window applies.
            note = redis_client.hget("fb:device_note", d.serial)
            if note == "group2":
                if 6 <= hour <= 12:
                    logger.info(f"{d.serial} {note} 6-12 小时 休眠")
                    time.sleep(60)
                    return
            else:
                if 1 <= hour <= 6:
                    logger.info(f"{d.serial} {note} 1-6 小时 休眠")
                    time.sleep(60)
                    return
            # NOTE(review): `if 1:` is always true, so the `else` branch far
            # below is unreachable dead code — and it references an undefined
            # `sleep_time`, which would raise NameError if it ever ran.
            if 1:
                accounts = hGetAll_accounts("online").get(d.serial) or {}
                for account, account_info in accounts.items():

                    account_info = json.loads(account_info)
                    user_id = account_info.get("user")
                    cookies_info = json.loads(account_info.get("cookies_info") or '[]')
                    cookies = parse_cookie_info(cookies_info)
                    # Share parsed cookies through the module-level pool.
                    cookies_pool[account] = cookies

                    # Skip accounts flagged with a checkpoint.
                    if redis_client.get(f"fb:account_checkpoint:{account}") == "1":
                        logger.warning(f"{d.serial} Account {account} checkpoint!!!")
                        continue
                    # Shortly after midnight: keep the account warm, then idle.
                    if hour == 0 and datetime.today().minute > 10:
                        active_account(d, user_id)
                        time.sleep(1800)
                        return 1
                    # if type_ == "honor":
                    # Rotate/sleep one account per multi-account device (disabled).
                    # =================================================
                    # device_sleep_status = redis_client.get(f"fb:account_sleep_time:{d.serial}:sleep_status")
                    # if device_sleep_status == None:
                    #     redis_client.set(f"fb:account_sleep_time:{d.serial}:{account}", str(int(time.time())))
                    #     # redis_client.set(f"fb:account_sleep_time:{d.serial}:{account}", str(1758093142))
                    #     redis_client.set(f"fb:account_sleep_time:{d.serial}:sleep_status", "sleep")
                    #     redis_client.set(f"fb:account_sleep_time:{account}:sleep_status", "sleep")
                    #     continue
                    #
                    # if redis_client.get(f"fb:account_sleep_time:{d.serial}:sleep_status") == "sleep":
                    #     if redis_client.get(f"fb:account_sleep_time:{account}:sleep_status") == "sleep":
                    #         account_sleep_time = redis_client.get(f"fb:account_sleep_time:{d.serial}:{account}")
                    #         # 账号休眠时间超过8小时，重新激活账号
                    #         if int(time.time()) - int(account_sleep_time) >= 3600 * 6:
                    #             redis_client.set(f"fb:account_sleep_time:{d.serial}:sleep_status", "active")
                    #             redis_client.set(f"fb:account_sleep_time:{account}:sleep_status", "active")
                    #         else:
                    #             continue
                    # if redis_client.get(f"fb:account_sleep_time:{d.serial}:sleep_status") == "active":
                    #     account_sleep_time = redis_client.get(f"fb:account_sleep_time:{d.serial}:{account}")
                    #     if account_sleep_time == None or int(time.time()) - int(account_sleep_time) >= 3600 * 24:
                    #         redis_client.set(f"fb:account_sleep_time:{d.serial}:{account}", str(int(time.time())))
                    #         redis_client.set(f"fb:account_sleep_time:{d.serial}:sleep_status", "sleep")
                    #         redis_client.set(f"fb:account_sleep_time:{account}:sleep_status", "sleep")
                    #         continue

                    # ================================================================
                    if accounts:
                        # for account, account_info in accounts.items():
                        # NOTE(review): _qsize() is Queue's private method; the
                        # public qsize() is the conventional call.
                        if seed_queue._qsize() != 0:
                            seed = seed_queue.get()
                            Log.success(f'{d.serial} seed_queue.get success seed:{seed}')
                            Log.info(f"seed_queue sizes: {seed_queue._qsize()}")
                        else:
                            # seed = get_cache_seed_from_redis()
                            seed = get_seeds(d, account)
                        if seed:
                            # logger.info(f"===={d.serial} Get seed: {seed}====")
                            job_id = seed.get('job_id')
                            if job_id:
                                redis_client.set(f"fb:job:{job_id}", str(int(time.time())))
                            # Cache the seed so it survives a crash; the cache is
                            # deleted after a successful upload (upload_data).
                            redis_client.setex(f"fb:seeds_cache:{seed.get('job_id')}", 86400, json.dumps(seed))
                            # redis_client.hset(key, mapping=seed)
                            # redis_client.expire(key, 86400)  # 一天
                            run_task(d, account_info, seed, account_id=account)
                        else:
                            logger.warning(f"{d.serial} No seed")

                    # time.sleep(sleep_time)

            else:
                logger.warning(f"{d.serial} Account abnormal!!!")
                logger.warning(f"{d.serial} Waiting {sleep_time} s......")
                time.sleep(sleep_time)
                return 1
            # time.sleep(sleep_time)
            # Log.warning(f'{d.serial} Waiting {sleep_time} s......')

        else:
            # No network: mark the outage, reboot the device, back off 5 minutes.
            redis_client.set(f"fb:no_network:{d.serial}", get_now_time())
            logger.warning(f"{d.serial} Network is not OK reboot device time sleep 5 min!!!")
            d.shell("reboot")
            time.sleep(300)
            return 1
    except Exception as e:
        logger.error(f"{d.serial} todo error: {e}")
        # Report the failed job (seed may still be {} if the error came early).
        ack_job_status(seed.get('job_id'), 'fail', json.dumps({"error": str(e), "seed": seed}))
        return 1


def get_now_time():
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    return f"{datetime.now():%Y-%m-%d %H:%M:%S}"


def to_curl(PROXY_URL, headers, data):
    """Build a shell-safe cURL command that POSTs {"headers": ..., "data": ...}
    as JSON to PROXY_URL, also forwarding the headers as -H options.

    Single quotes in the URL, header values and JSON body are escaped with
    the '\\'' idiom so each value can sit inside a '...' shell string.
    """

    def _sq(s):
        # Escape embedded single quotes for safe use inside '...'.
        return str(s).replace("'", "'\\''")

    header_opts = ' '.join(f"-H '{name}: {_sq(value)}'" for name, value in headers.items())
    body = json.dumps({"headers": headers, "data": data}, ensure_ascii=False).replace("'", "'\\''")
    return (
        f"curl -s -X POST '{_sq(PROXY_URL)}' "
        f"-H 'Content-Type: application/json' {header_opts} "
        f"-d '{body}' --max-time 30"
    )


def run_task(d: Device, account_info, seed, account_id=""):
    """Dispatch one seed to the matching crawler routine.

    'group'-type seeds go to the group-timeline crawler; otherwise the seed
    name selects post / profile crawling, and purely numeric seeds are
    treated as user timelines.  Non-post http(s) seeds are rejected and
    acked as failed.  Any exception is reported via ack_job_status.
    """
    ip = "127.0.0.1"
    authorization = account_info.get("authorization")
    # cookies = account_info.get("cookies")
    try:
        if not seed:
            return
        ua = get_facebook_fake_useragent()
        Log.success(f'{d.serial} Get seed: {seed}')
        # Raw URLs are only supported for post seeds.
        if seed["name"] != "post" and str(seed["seed"]).startswith("http"):
            Log.error(f"不支持url: {seed}")
            ack_job_status(seed['job_id'], "fail",
                           json.dumps({"error": "not supported url(url to id fail)", "seed": seed}))
            # Drop the cached copy of the seed as well.
            redis_client.delete(f"fb:seeds_cache:{seed.get('job_id')}")
            return
        if seed['type'] == 'group':
            run_group_timeline(ip, seed, authorization, ua=ua, device=d, account_id=account_id)
        elif seed["name"] == "post":
            run_post(ip, seed, authorization, ua=ua, device=d, account_id=account_id)
        elif seed["name"] == "profile":
            run_profile(ip, seed, authorization, ua=ua, device=d, account_id=account_id)
        elif str(seed["seed"]).isdigit():
            run_user_timeline(ip, seed, authorization, ua=ua, device=d, account_id=account_id)
    except Exception as e:
        ack_job_status(seed.get('job_id'), 'fail', json.dumps({"error": str(e), "seed": seed}))
        print(f"run_task error {d.serial} {e}")


def hGetAll_accounts(key):
    """Collect every Redis hash under fb:<key>:*, keyed by device serial.

    E.g. hGetAll_accounts("online") -> {serial: {account: account_info_json}}.
    """
    result = {}
    for full_key in redis_client.keys(f"fb:{key}:*"):
        serial = full_key.split(":")[-1]
        result[serial] = redis_client.hgetall(full_key)
    return result


def wechat_report(interval=1800):
    """Periodically assemble a markdown status report of the group crawl.

    Runs forever; every `interval` seconds it reads counters out of Redis
    (seed backlog, per-day post/api counts, totals, account health) and
    prints the report.  The actual WeChat push is currently disabled.
    """
    while True:
        cur_date = time.strftime('%Y-%m-%d', time.localtime())
        cur_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
        try:
            seed_count = redis_client.llen('fb:groups')

            # Fetched for the (disabled) per-device breakdown below.
            post_counts = redis_client.hgetall(f'fb:count:gposts:{cur_date}')
            api_counts = redis_client.hgetall(f'fb:count:gapi:{cur_date}')

            total = redis_client.hget('fb:count:gtotal', cur_date)
            today = redis_client.scard(f'fb:count:today_posts:{cur_date}')

            health = redis_client.hgetall('fb:account:health')

            # NOTE(review): 32 looks like the expected fleet size — confirm.
            healthy_count = 32 - len(health)

            # Per-device detail section is currently disabled, so this stays empty.
            count_info = ""

            info = f"""# 群组采集
种子:{seed_count}
{cur_time} 汇报间隔: {interval}s
总采集:{total}
今日发布:{today}
账号数:{healthy_count}
----------------
{count_info}"""
            print(info)
            # wechat.push_markdown(info)
        except Exception:
            traceback.print_exc()
        time.sleep(interval)


def check_account_thread(d: Device):
    """Re-run the account health check for this device every 10 minutes."""
    while True:
        flag = StopFlag().stopped
        logger.info(f"{d.serial} check_account_thread start")
        check_account(d, flag)
        time.sleep(600)


def Device_(serial=""):
    """Create a Device handle.

    Connects via USB serial when one is given (or the default device when
    not), and falls back to a localhost:5555 TCP connection on failure.

    Returns:
        Device: a connected device wrapper.
    """
    try:
        # NOTE(review): the kwarg name `sagit` looks unusual — confirm it
        # matches the Device constructor's API.
        d = Device() if not serial else Device(sagit=serial)
    except Exception:
        # Fix: a bare `except:` would also swallow KeyboardInterrupt and
        # SystemExit; only recover from ordinary errors before falling
        # back to the TCP connection.
        d = Device(device_ip="localhost", port=5555)
    return d


def update_note_thread():
    """Poll the crawler API hourly for device group notes and mirror them
    into the fb:device_note Redis hash.

    Runs forever; on any error it logs and retries after 10 seconds.
    """
    while True:
        try:
            res = requests.get("https://api-crawler.datatell.com.cn/termux/get_note?project_name=facebook")
            if res.status_code == 200:
                for device, info in res.json().get("data").items():
                    redis_client.hset("fb:device_note", mapping={device: info})
            time.sleep(3600)
        except Exception as e:
            logger.error(f"update_note_thread error{e}")
            time.sleep(10)


def worker(device, server_ip):
    """Main per-device loop: keep the screen awake, make sure the local
    facebook-server go binary is running, and repeatedly call todo().

    NOTE(review): `device` is only used in error reporting and `server_ip`
    is unused; the actual device is discovered via Device_() — confirm
    whether per-serial dispatch was intended.
    """
    d = Device_()
    # Disable SELinux enforcement so root shell tweaks work.
    d.shell(f'su -c setenforce 0')
    threading.Thread(target=send_count_to_server, args=(d.serial,)).start()
    threading.Thread(target=check_account_thread, args=(d,)).start()
    time.sleep(30)
    while True:
        try:
            if not d.serial:
                d = Device_()
            # Wake the screen if it is off and pin the screen-off timeout to max.
            screenState = d.shell('dumpsys window policy | grep screenState', user="root").output.strip()
            if screenState == "screenState=0" or screenState == "screenState=SCREEN_STATE_OFF":
                d.shell('input keyevent KEYCODE_POWER')
                d.swipe_up(0.8)
                d.shell("settings put system screen_off_timeout 2147483647")
                d.shell('input keyevent KEYCODE_BACK')
                d.shell('input keyevent KEYCODE_HOME')
                d.click('CANCEL')
            # d = Device(device)
            # Check whether the local go service is running; start it if not.
            info = d.shell("ps -ef |grep facebook").output
            if "facebook-server" not in info:
                d.shell("nohup /data/local/tmp/facebook-server >/dev/null 2>&1", timeout=10)
                print("facebook-server start......")
            else:
                print("facebook-server started.....")
            stop_flag = StopFlag().stopped
            todo(d, stop_flag)
            sleep_time = random.randint(35, 40)
            logger.info(f"{d.serial} worker sleep {sleep_time} s")
            time.sleep(sleep_time)
            # NOTE(review): this only rebinds the local variable; it does not
            # signal the threads spawned above.
            stop_flag = True

        except Exception as e:
            if "not online" in str(e):
                redis_client.sadd("fb:device_connect_failed", f"{device}")
            # NOTE(review): no sleep on this path — a persistent error will
            # spin this loop at full speed.
            logger.error(f"worker error {device} {e}")


def web_post_scheduler(rootTask, OpenPermalink, mission_id="10795178"):
    """Submit one crawl job (rootTask URL) to the jobAllotment scheduler API,
    retrying up to 5 times.

    NOTE(review): the API token below is hard-coded in source — consider
    moving it to configuration.
    """
    payload = json.dumps({

        "token": "9c7f3f294b0bfbb13604073fba440df7",

        "missionId": f"{mission_id}",

        "rootTask": rootTask,

        "isPriority": False,

        "configParam": {

            # "output.name": "shuyuan_dajiang_poc_user_v1"

        },

        "transferParam": {
            "OpenPermalink": OpenPermalink,

            "comment_page_num": 1,

            "comment_depth": 1

        }
    })
    headers = {
        'User-Agent': 'apifox/1.0.0 (https://www.apifox.cn)',
        'Content-Type': 'application/json',
        'Content-Length': ''
    }
    for i in range(5):
        try:
            response = requests.request("POST", "https://arrakis.datatell.com.cn/api/crawler/jobAllotment/start",
                                        headers=headers, data=payload)
            logger.info(f"web_post_scheduler response:{response.text}")
            if response.status_code == 200:
                break
            # NOTE(review): reached only on non-200 responses, and .json()
            # may itself raise on a non-JSON body (caught below).
            if response.json()['code'] != "operation succeed":
                logger.info(f"web_post_scheduler {rootTask} ：失败， 重试第{i + 1}次")
            if i == 4:
                logger.info(f"web_post_scheduler 刷种子失败：{rootTask}")
        except Exception as e:
            # NOTE(review): the log message says 5 seconds but the sleep is 3;
            # both branches end up moving on to the next retry.
            if "ConnectionError" in str(e):
                logger.info(f"web_post_scheduler 连接失败， 等待5秒")
                time.sleep(3)
                pass
            else:

                continue


def run_search_post():
    """Drain seeds from the fb:post:queue Redis list and hand each one to
    the web post scheduler.

    A seed is a comma-separated line, either
    "keyword,url,OpenPermalink,date" or "url,OpenPermalink,date".
    Runs forever; backs off briefly when the queue is empty or on errors.
    """
    logger.info("run_search_post start")
    while True:
        try:
            seed = redis_client.lpop("fb:post:queue")
            if seed:
                keyword = ""
                if len(seed.split(",")) == 4:
                    keyword, url, OpenPermalink, date = seed.split(",")
                else:
                    url, OpenPermalink, date = seed.split(",")
                logger.info(
                    f"run_search_post get seed:keyword:{keyword} url:{url} OpenPermalink:{OpenPermalink} task date:{date}")
                web_post_scheduler(url, OpenPermalink)
                # time.sleep(2)
            else:
                # Fix: the original looped at full speed when the queue was
                # empty, hammering Redis; back off instead.
                time.sleep(2)
        except Exception as e:
            logger.error(f"run_search_post redis get error:{e}")
            time.sleep(2)
            continue


def check_local_fid():
    """Seed the local Redis fb:url2fid hash from the on-disk mapping file
    (tab-separated "url<TAB>fid" lines) when it is not already present."""
    local_redis = Redis(host='localhost', port=6379, db=0)
    if local_redis.exists("fb:url2fid"):
        return
    with open("/data/local/tmp/mpsf/fb_url2fid.txt", "r", encoding="utf-8") as f:
        for line in f.read().splitlines():
            url, fid = line.split("\t")
            local_redis.hset("fb:url2fid", mapping={url: fid})


def send_count_to_server(device):
    """Report today's honor upload counter for this device to the crawler
    API once a minute, skipping the hours before 08:00.  Runs forever.
    """
    while True:
        try:
            hour = datetime.today().hour
            if hour < 8:
                logger.info(f"当前时间{hour}小时，不发送数据")
                time.sleep(60)
                continue
            date = datetime.now().strftime('%Y-%m-%d')
            raw = redis_client.get(f"fb:upload_count:honor:{date}")
            payload = {
                "project_name": "fb_honor",
                "key": f"fb:upload_count:honor:{device}:{date}",
                "count": int(raw) if raw else None,
            }
            res = requests.post("https://api-crawler.datatell.com.cn/termux/upload", json=payload)
            logger.info("upload count to server: " + res.text)
            time.sleep(60)
        except Exception as e:
            time.sleep(5)
            logger.error(f"send_count_to_server error: {e}")


def test_main():
    """Manual test harness: crawl a hard-coded list of post URLs against a
    fixed device and access token.

    NOTE(review): the authorization token and device serial are hard-coded
    below — meant for ad-hoc local runs only, not production.
    """
    d = Device("87a8eacf")
    authentication = d.shell("cat /data/data/com.facebook.katana/app_light_prefs/com.facebook.katana/authentication",
                             user="root")
    print(authentication)
    ua = get_facebook_fake_useragent()
    # Forward the on-device go-server port to the host.
    os.system("adb -s 87a8eacf forward tcp:8080 tcp:8080")
    authorization = 'EAAAAUaZA8jlABPXiozILfsM0szuzTwmgno7R3SI7cTQMsQoXuCfVZBz3pLFPeQy9TVPpFB6MMhw670kFKZAH5Ac5Wy5WCZA4hD1ZBpKoev5olmJoqUukZCwzTxTJpngRDMFd7QVJALZAORwHbUqQ9w8FaZBTh9UtMjwYTFL3a92n1NoI1wxglxjQbKnjPCc2wNVSujBqvZCTSPwZDZD'
    # post_url = "https://www.facebook.com/reel/1606933054048179"
    # note_id = "61571854068013"

    post_url_list = []

    # with open("fb_search_post.txt", "r", encoding="utf-8") as f:
    #     post_url_list_info = f.read().splitlines()
    #     post_url_list_info.reverse()
    #     for post_url in post_url_list_info:
    #         post_url = post_url.split(",")[1]
    #         if post_url in post_url_list:
    #             # logger.warning(f"post_url:{post_url} is in post_url_list continue")
    #             continue
    #         post_url_list.append(post_url)
    # # run_search("127.0.0.1",seed,authorization, ua, device=d)
    # # run_user_timeline("127.0.0.1", seed, authorization, ua, device=d, account_id="61574804956527")
    # Already-crawled URLs are skipped below.
    with open("upload_url_list.txt", "r", encoding='utf-8') as f:
        upload_url_list = f.read().splitlines()
    #
    SEEDS = '''https://www.facebook.com/votchinese/posts/pfbid02xFDWLQB311wHGKHDPtHPEU8RoY5E4qUvga5qS9n4LCoJPKUY3Cbxyv8Wykhxwrgml
    https://www.facebook.com/reel/1394630768981635
    https://www.facebook.com/tsultim.choeda/posts/pfbid037s4XhdMq7T1cTBZXVVXJgw7UyAc94YGvDRMRmvfzVQCZKucAo89nQ5K6KrBqN2rBl
    https://www.facebook.com/CTATibetcom/posts/pfbid0m2KpHirRMMCnhbG6M3AF7dLEcMoekPgXfiTtdghHorLeKWHbu8TZPk4hiDGi5Dijl
    https://www.facebook.com/votchinese/posts/pfbid02jck6RsXWb4PrpcTrWfvZ1tLtEGDQmBfvkF9yRP19vzLc2FrjQ7xwDeDzMoEd7beul
    https://www.facebook.com/ya.chu.chang.767843/posts/pfbid02obPvM9Ad9BZ6mc322CdhgbLNpFWmDrPd6PagaSFKdn25m8yzmYo6nBzDqreTUedTl
    https://www.facebook.com/GadenShartseMonastery/posts/pfbid02AusdvyB7aMVNgJ3WnwhqwUqgU2x3wk3gShRQohBfBWtfVWDvSV6u8BhK4wD6hvzml
    https://www.facebook.com/henry.leefachuan/posts/pfbid0a8et9VTyfM3Wu8LNuMBdCrNBCcD1UaZYXFxWgVeqXFWyg2ba3YWyTgwfnQHJgKECl
    https://www.facebook.com/groups/605348869846008/?multi_permalinks=2541564786224397&hoisted_section_header_type=recently_seen
    https://www.facebook.com/datang80/posts/pfbid02zEsQj2SN9JdhjBsgqutkkqHQManFwpCzMgtmmvKdMcweH4EzJVUyVr9ZbppG8Ad6l
    https://www.facebook.com/kuo.s.tung/posts/pfbid09Z2R1s9zj1D3nk9mZWUUqQE5EVLWFvkYNPRtqUZ8sekbi5VHgPWfqejRETxyGh6al
    https://www.facebook.com/tsungyi.chiu/posts/pfbid02JGLdnPR2rmvFStjDCMSXCLkMxvgHjJQwA8DSQ8Dv9zHuuDhVtGzkBdSLQcQBHwQtl
    https://www.facebook.com/permalink.php?story_fbid=pfbid02uj6N9ELaqjsbeL9qtypWufhNFx8EHhY6jqbwMeWdcp8jE1Rzc72nMHiach9pfRkul&id=100084765632370
    https://www.facebook.com/sydaway/posts/pfbid0JcmBqWT3trTKnTBok4KPy9Vipfa4HhJRZ1bqXVv2Bxn5Ma4KAMLNPFomWtMWRjcjl'''
    seed_list = ["https://www.facebook.com/HONORParaguay/posts/776464528723087",
                 ]
    # The single-URL list above is overwritten by the SEEDS block.
    seed_list = SEEDS.split("\n")
    for post_url in seed_list:
        post_url = post_url.strip()
        if post_url in upload_url_list:
            logger.warning(f"post_url:{post_url} is in upload_url_list continue")
            continue

        seed = {'job_id': 'facebook_hook_20250824105911_381989_33', 'name': 'post', 'page_num': 1,
                'seed': post_url, 'type': 'post'}
        run_post("127.0.0.1", seed, authorization, ua, device=d)
        time.sleep(10)


if __name__ == '__main__':
    # Entry point: seed the local url->fid cache, start background
    # maintenance threads, then run the main device worker loop inline.
    # NOTE(review): redis_client is None at the top of the file; it is
    # presumably initialized in code not shown here — confirm before the
    # threads below touch it.
    server_ip = "127.0.0.1"
    # seed = {'job_id': 'facebook_hook_20250824105911_381989_33', 'name': 'profile', 'page_num': 1,'seed':"100024925790066" , 'type': 'profile'}
    # authorization = 'EAAAAUaZA8jlABPXiozILfsM0szuzTwmgno7R3SI7cTQMsQoXuCfVZBz3pLFPeQy9TVPpFB6MMhw670kFKZAH5Ac5Wy5WCZA4hD1ZBpKoev5olmJoqUukZCwzTxTJpngRDMFd7QVJALZAORwHbUqQ9w8FaZBTh9UtMjwYTFL3a92n1NoI1wxglxjQbKnjPCc2wNVSujBqvZCTSPwZDZD'
    check_local_fid()

    # test_main()
    # d = Device("87a8eacf")
    # check_account_thread(d, stop_flag)
    # exit(0)
    # logger.info(f"Device serial: {d.serial}")
    # logger.info(f"Device model: {d.shell('getprop ro.serialno', user='root').output}")
    # run_profile(server_ip,seed,authorization,"",d)
    # check_account(d,stop_flag)
    # os.system("adb  forward tcp:8080 tcp:8080")

    # stop_flag = True
    # todo(d, "")

    print(get_now_time())
    # Update the device grouping notes from the remote API.
    threading.Thread(target=update_note_thread).start()
    # Watch job status updates.
    threading.Thread(target=check_job).start()
    threading.Thread(target=run_search_post).start()
    worker("", server_ip)
