#coding=utf-8

import argparse,json,os
from xiapibuy import Xiapibuy
from pysqlite import sqliteDB
from pathlib import Path
import time

# Resolve the project root (two directories above this file) and open the
# shared SQLite database used by every function in this script.
# NOTE(review): assumes this file lives exactly two levels below the repo
# root next to an "app/downdb.db" — confirm deployment layout.
BASE_DIR = Path(os.path.realpath(__file__)).parents[2]
db_path = os.path.join(BASE_DIR,"app","downdb.db")
db = sqliteDB(db_path)  # module-level handle shared by all functions below




def get_token_from_db(str_app_key):
    """Return the stored cookie/token for *str_app_key*, or "" if not found.

    Looks up ``spider_user_control`` by ``user_appkey`` and returns column
    index 6 of the row only when exactly one row matches; any other count
    (zero or duplicates) yields the empty string.
    """
    # Double single quotes — SQLite's escape for string literals — so an app
    # key containing "'" can neither break the WHERE clause nor inject SQL.
    # (sqliteDB.findall accepts only a raw where-string, so real parameter
    # binding is not available here.)
    safe_key = str(str_app_key).replace("'", "''")
    res = db.findall("spider_user_control", "user_appkey='{}'".format(safe_key))
    if len(res) == 1:
        # NOTE(review): assumes the token lives at column index 6 of the
        # spider_user_control schema — confirm against the table definition.
        return res[0][6]
    return ""
        
def main_loop():
    """Run forever, periodically triggering an unfollow pass per worker.

    Each cycle: sleep, then for every row in ``spider_worker_list`` load that
    worker's stored cookie, install it on the Xiapibuy client, and run the
    unfollow routine.

    NOTE(review): the original header comment claimed a 5-hour interval and
    batches of 50 per shop, but the code sleeps 3 hours and passes no batch
    size — confirm which is intended.
    """
    client = Xiapibuy()
    while True:
        # The wait comes first, so the initial pass only happens 3h after start.
        time.sleep(3 * 60 * 60)

        workers = db.findall("spider_worker_list", "")
        for worker in workers:
            # worker[1] holds the app key; original note said fields are
            # '||'-separated — confirm against the table schema.
            cookie = get_token_from_db(worker[1])
            client.xiapi_set_cookie(cookie)
            client.xiapi_main_run_get_unfollower()
            



if __name__ == '__main__':
    # Runs forever; main_loop() never returns under normal operation.
    main_loop()
    # NOTE(review): the argparse CLI below is dead, commented-out code and
    # references parser_main, which is not defined anywhere in this file —
    # either restore parser_main or delete this block.
    # parser = argparse.ArgumentParser(description=
    #         "xiapi web spider")
    # parser.add_argument("-cf", "--cat_follow",type=int,
    #         help="category follow")
    # parser.add_argument("-urlf", "--url_follow",
    #         help="url follow")
    # parser.add_argument("-uf", "--un_follow",
    #         help="unfollow")
    # parser_main(parser.parse_args())
            