#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2024/2/19 16:36
# @Author  : 王凯
# @File    : std_gen_spider_test.py
# @Project : scrapy_spider
import concurrent.futures
import os.path
import subprocess
import threading
from pathlib import Path

from loguru import logger

from apps.tax_policy.tax_policy.std_template.config_spider_entry import ConfigSpiderData
from apps.tax_policy.tax_policy.std_template.gen_scrapy_spider import ScrapyGenerator
from utils.db.mysqldb import MysqlDB


class StdGenSpiderTest:
    """Batch-generate scrapy spider modules from DB templates for 重庆市.

    For each pending template row: render the spider source, test-run it in a
    cache directory via ``scrapy crawl``, and — only if the crawl succeeds —
    move it to the spiders package, ``git add`` it, and mark the row done.
    """

    # NOTE(review): instantiated at import time — assumes DB config/connection
    # is available when this module is imported; confirm that is intended.
    db_api = MysqlDB()
    # Target package directory; trailing '/' is kept because paths below are
    # built by plain string concatenation.
    base_path = (Path(__file__).parent.parent.parent / 'spiders' / 'chongqing').as_posix() + '/'

    # Serializes `git add` calls across worker threads (git index lock).
    lock = threading.Lock()

    def run_one(self, data):
        """Generate, test-run and persist one spider described by *data*.

        *data* is a template row (dict); ``data['name']`` is used as the
        module and spider name. Raises ``CalledProcessError`` if the test
        crawl fails, which aborts the commit/persist steps for this row.
        """
        print(data)
        spider_file_content = ScrapyGenerator(ConfigSpiderData(**data)).generate()

        # base_path already ends with '/', so do not prepend another one.
        cache_base_path = self.base_path + 'cache/'
        # makedirs(exist_ok=True) is race-free; ensure the package marker
        # exists even when the directory itself already did.
        os.makedirs(cache_base_path, exist_ok=True)
        init_path = cache_base_path + '__init__.py'
        if not os.path.exists(init_path):
            with open(init_path, 'w') as f:
                f.write('')

        cache_file_path = cache_base_path + data['name'] + '.py'
        file_path = self.base_path + data['name'] + '.py'

        logger.info(f"正在生成文件 {cache_file_path}")
        with open(cache_file_path, "w", encoding="utf-8") as fs:
            fs.write(spider_file_content)

        logger.info(f"运行 {cache_file_path}")
        # check=True: a failing crawl raises here and skips everything below.
        subprocess.run(f"scrapy crawl {data['name']}".split(), check=True)

        # Best-effort cleanup of the cache copy; log instead of swallowing
        # silently, and only catch filesystem errors.
        try:
            os.remove(cache_file_path)
        except OSError as e:
            logger.warning(f"删除缓存文件失败 {cache_file_path}: {e}")

        with open(file_path, "w", encoding="utf-8") as fs:
            fs.write(spider_file_content)

        self.git_add_file(file_path)

        # Mark the template row as successfully generated.
        data.update({"state": 1})
        self.db_api.add_smart("template_spider_tax_policy", data, update_columns=['state'])

    def git_add_file(self, file_path):
        """Stage *file_path* with git, serialized under the class lock."""
        with self.lock:
            logger.info(f"正在提交文件 {file_path}")
            subprocess.run(f"git add {file_path}".split(), check=True)

    def run(self):
        """Fetch all pending 重庆市 template rows and process them concurrently."""
        datas = self.db_api.find(
            'select * from template_spider_tax_policy where stages is not null and province = "重庆市" and state = 0',
            to_json=True
        )

        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
            futures = [executor.submit(self.run_one, data) for data in datas]
            # Surface worker exceptions instead of dropping them silently.
            for future in concurrent.futures.as_completed(futures):
                try:
                    future.result()
                except Exception:
                    logger.exception("生成任务失败")


if __name__ == '__main__':
    # Script entry point: run the full generation batch.
    runner = StdGenSpiderTest()
    runner.run()
