from flask import Flask
import logging
import os
import sys
from logging.handlers import RotatingFileHandler
from crawler import crawler
from crawler import CectCrawler
from crawler import WeirdCrawler
from concurrent.futures import ThreadPoolExecutor
import threading
from crawler.CrawlerJobProducer import *

app = Flask(__name__)


def setup_log():
    """Configure logging for the Flask app.

    Attaches two INFO-level handlers to ``app.logger``:
    a size-rotating file handler writing to ``<cwd>/logs/rotating_logging.log``
    (100 MB per file, 10 backups) and a stdout stream handler.
    Both share one format: timestamp - module - thread id - level : message.
    """
    # The log directory must exist before RotatingFileHandler opens the file,
    # otherwise it raises FileNotFoundError on a fresh checkout/deployment.
    log_dir = os.path.join(os.getcwd(), "logs")
    os.makedirs(log_dir, exist_ok=True)

    file_log_handler = RotatingFileHandler(os.path.join(log_dir, "rotating_logging.log"),
                                           maxBytes=1024 * 1024 * 100, backupCount=10)
    formatter = logging.Formatter('%(asctime)s - %(module)s - %(thread)d - %(levelname)s : %(message)s')
    file_log_handler.setFormatter(formatter)

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(formatter)

    file_log_handler.setLevel(logging.INFO)
    console_handler.setLevel(logging.INFO)
    # Handler levels alone are not enough: the logger itself must also be
    # lowered to INFO, or Flask's default logger level can drop INFO records
    # before they ever reach the handlers.
    app.logger.setLevel(logging.INFO)
    app.logger.addHandler(file_log_handler)
    app.logger.addHandler(console_handler)

# Option 3: thread pool (disabled, kept for reference)
# executor = ThreadPoolExecutor(1)



@crawler.route('/api/hello')
def hello():
    """Health-check endpoint: logs the call and returns a static greeting."""
    app.logger.info(">>>>>>>>>>开始运行 hello <<<<<<<<<<<<")
    return "Hello World!"

# Job body shared by options 1 and 3 (disabled, kept for reference)
# def do_job(crawler_name):
#     if crawler_name == 'cect':
#         app.logger.info("CectCrawler被调用")
#         CectCrawler.main()
#     else:
#         app.logger.info("1024 Crawler被调用")
#         WeirdCrawler.main()

@crawler.route('/api/call_crawler/<crawler_name>')
def call_crawler(crawler_name):
    """Enqueue a crawl job for *crawler_name* on the Kafka queue.

    Publishes the crawler name to the ``crawler.queue`` topic via
    CrawlerJobProducer; the actual crawl runs asynchronously in a consumer.

    :param crawler_name: identifier of the crawler to run (from the URL path)
    :return: confirmation message echoing the crawler name
    """
    app.logger.info(">>>>>>>>>>开始运行 call crawler <<<<<<<<<<<<")
    # Broker location is overridable via environment variables; defaults
    # preserve the previously hard-coded address for backward compatibility.
    broker_host = os.environ.get("CRAWLER_BROKER_HOST", "192.168.56.10")
    broker_port = int(os.environ.get("CRAWLER_BROKER_PORT", "9092"))
    producer = CrawlerJobProducer(broker_host, broker_port, "crawler.queue")
    producer.sendmesg(crawler_name)
    app.logger.info(">>>>>>>>>>结束运行 call crawler <<<<<<<<<<<<")
    return "调用爬虫：" + crawler_name


def main():
    """Wire up logging and the crawler blueprint, then start the dev server."""
    setup_log()
    blueprint_prefix = '/crawler'
    app.register_blueprint(crawler, url_prefix=blueprint_prefix)
    app.run(debug=True, host="0.0.0.0")


if __name__ == '__main__':
    main()

