# coding=utf8
import threading

from settings import DINGDINGROBOT_URL_PROXY
from support_services.support_proxies import SupportProxies
from support_services.support_redis import SupportRedis
from support_services.except_back import ExceptBack
from tools.logger import Logger


def main():
    """Entry point: start the proxy-pool and redis-maintenance background services.

    Spawns one worker thread per support service, then joins them so the
    main thread stays alive as a supervisor (keeps KeyboardInterrupt
    deliverable to the foreground thread instead of exiting main()
    immediately after spawning).
    """
    name = "pycrawler_service"

    logger = Logger(name)

    # Separate exception reporters so each service alerts on its own channel;
    # the proxy-pool reporter posts to the dedicated DingDing robot URL.
    except_back = ExceptBack(name + "_redis_maintain")
    except_back_proxies = ExceptBack(name + "_proxies_pool", url=DINGDINGROBOT_URL_PROXY)

    # Proxy pool maintenance service.
    proxies_thread = threading.Thread(
        target=SupportProxies(error_back=except_back_proxies).run,
        args=(logger, ),
        name=name + "_proxies_pool",
    )
    # Sync data from the server-side redis down to the local one.
    redis_thread = threading.Thread(
        target=SupportRedis(error_back=except_back).run,
        args=(logger, ),
        name=name + "_redis_maintain",
    )

    workers = (proxies_thread, redis_thread)
    for worker in workers:
        worker.start()
    # Block here so main() does not return while the services run.
    for worker in workers:
        worker.join()


# Run the service only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
