#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2023/9/12 14:22
# @Author  : 王凯
# @File    : main.py
# @Project : spider-man
import hashlib
import re
from pathlib import Path

import numpy as np
import pandas as pd
import pypinyin
import requests
from jinja2 import Template

from utils.logs import log

# Directory that holds the Jinja2 template sources for the generated spiders.
_TEMPLATE_DIR = Path(__file__).parent.parent / "template"


def _load_template(template_file_name):
    """Read one template file and compile it into a Jinja2 ``Template``.

    ``Path.read_text`` opens and closes the file itself; the previous
    ``open(...).read()`` calls leaked every file handle until garbage
    collection.
    """
    return Template((_TEMPLATE_DIR / template_file_name).read_text(encoding="utf-8"))


policy_index_spider_create_page_template = _load_template("policy_index_spider_create_page.templ")
policy_policy_page_datasource_template = _load_template("policy_page_datasource.templ")
policy_dhtml_spider_template = _load_template("policy_dhtml_spider.templ")
policy_next_page_spider_template = _load_template("policy_next_page_spider.templ")
policy_index_spider_template = _load_template("policy_index_spider.templ")
policy_index_spider_plus_template = _load_template("policy_index_plus_spider.templ")
policy_gkmlpt_spider_template = _load_template("policy_gkmlpt_spider.templ")
policy_dataproxy_spider_template = _load_template("policy_dataproxy_spider.templ")
policy_cms_spider_template = _load_template("policy_cms_spider.templ")
policy_list_html_spider_template = _load_template("policy_list_html_spider.templ")
policy_search_class_sql_spider_template = _load_template("policy_search_class_sql_spider.templ")


class GenTemplateSpider:
    """Render spider source files from Jinja2 templates and write them to disk.

    Each ``gen_*`` method fills one template with site metadata (region
    fields, source name, entry URL, XPath hints) and writes the rendered
    code to ``base_spider_dir / "<file_name>.py"``.
    """

    # Destination directory for the generated spider modules.
    base_spider_dir = Path(__file__).parent.parent.parent / "spiders" / "shanxi"

    @staticmethod
    def _function_name(file_name):
        """Turn a snake_case module name into the CamelCase spider class name."""
        return file_name.title().replace("_", "")

    @classmethod
    def _write_spider(cls, file_name, code, label):
        """Persist rendered spider *code* and log completion tagged with *label*."""
        with open(cls.base_spider_dir / f"{file_name}.py", "w", encoding="utf-8") as f:
            f.write(code)
        log.info(f"{file_name} {label} 代码生成完成")

    def run_normal(
        self,
        file_name,
        province="",
        city="",
        county="",
        park="",
        source="",
        url="",
        next_page_href_xpath="//a[@class='last']",
        list_a_xpath="//*[@class='newsList']//a",
        *args,
        **kwargs,
    ):
        """Probe *url* and dispatch to the matching generator.

        NOTE: the incoming ``file_name`` is intentionally discarded and
        rebuilt from the pinyin initials of *source* plus a short MD5 hash
        of *url*, which keeps generated module names unique and
        filesystem-safe.
        """
        # First letter of each syllable's pinyin, with full-width parens stripped.
        source_initials = (
            "".join(syllable[0][0] for syllable in pypinyin.pinyin(source, style=pypinyin.NORMAL))
            .replace("（", "")
            .replace("）", "")
        )
        file_name = "{base_path}_{source}_{hash}_policy".format(
            base_path="shanxi",
            source=source_initials,
            hash=hashlib.md5(url.encode("utf-8")).hexdigest()[:4],
        )
        print(file_name, province, city, county, park, source, url)
        response = requests.get(
            url,
            headers={
                "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
                "Accept-Language": "zh-CN,zh;q=0.9,zu;q=0.8,be;q=0.7,en;q=0.6",
                "Cache-Control": "no-cache",
                "Connection": "keep-alive",
                "Pragma": "no-cache",
                "Upgrade-Insecure-Requests": "1",
                "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
            },
            # The original call had no timeout and could hang indefinitely.
            timeout=60,
        )
        html = response.content.decode()
        # Sites paginated by the createPageHTML() JS helper get the dedicated
        # template; everything else falls back to the "plus" index spider.
        if "createPageHTML" in html:
            self.gen_index_spider_create_page(
                file_name=file_name, province=province, city=city, county=county, park=park, source=source, url=url
            )
        else:
            self.gen_index_plus_spider(
                file_name=file_name, province=province, city=city, county=county, park=park, source=source, url=url
            )

    @staticmethod
    def gen_index_spider(
        file_name,
        province="",
        city="",
        county="",
        park="",
        source="",
        url="",
        next_page_href_xpath="//a[contains(@text, '尾页')]",
        list_a_xpath=["//ul//li//a", "//dl//a"],  # read-only default, never mutated
    ):
        """Generate an index spider that follows a next-page link."""
        code = policy_index_spider_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
            next_page_href_xpath=next_page_href_xpath,
            list_a_xpath=list_a_xpath,
        )
        GenTemplateSpider._write_spider(file_name, code, "index_spider")

    @staticmethod
    def gen_index_plus_spider(
        file_name,
        province="",
        city="",
        county="",
        park="",
        source="",
        url="",
        total_page_xpath=".",
        total_page_xpath_reg=r"""(?:(?:page|record)Count:|(?:pageShow\([^\)]{1,50}?\),)|(?:countPage\s*=))\s*['\"]?(\d+)['\"]?""",
        list_a_xpath=["//ul[contains(@class, 'list')]//li//a", "//dl[contains(@class, 'list')]//dt//a"],  # read-only default
    ):
        """Generate an index spider that derives the total page count via regex."""
        code = policy_index_spider_plus_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            name=file_name,
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
            total_page_xpath=total_page_xpath,
            total_page_xpath_reg=total_page_xpath_reg,
            list_a_xpath=list_a_xpath,
        )
        # Label fixed: previously mislogged as "index_spider".
        GenTemplateSpider._write_spider(file_name, code, "index_plus_spider")

    @staticmethod
    def gen_dhtml_spider(
        file_name,
        province="",
        city="",
        county="",
        park="",
        source="",
        url="",
        total_page_xpath="//div[@class='fenye']",
        total_page_xpath_reg=r"_(\d+)\.html\">尾页",
        list_a_xpath=['//table[@class="table table-striped table-hover"]//a'],  # read-only default
        end_tail="_1.html",
    ):
        """Generate a spider for sites paginated with ``..._N.html`` suffixes."""
        code = policy_dhtml_spider_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
            total_page_xpath=total_page_xpath,
            total_page_xpath_reg=total_page_xpath_reg,
            list_a_xpath=list_a_xpath,
            end_tail=end_tail,
        )
        GenTemplateSpider._write_spider(file_name, code, "dhtml_spider")

    @staticmethod
    def gen_next_page_spider(
        file_name,
        province="",
        city="",
        county="",
        park="",
        source="",
        url="",
        total_page_xpath="//span[@class='p_next p_fun']",
        next_page_xpath_reg=r".*href=\"(.*?)\"\s*>下",
        list_a_xpath=['//ul[@class="gkzdList"]//a'],  # read-only default
        end_tail="_1.html",
    ):
        """Generate a spider that extracts the next-page href with a regex."""
        code = policy_next_page_spider_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
            total_page_xpath=total_page_xpath,
            next_page_xpath_reg=next_page_xpath_reg,
            list_a_xpath=list_a_xpath,
            end_tail=end_tail,
        )
        # Label fixed: previously mislogged as "dhtml_spider".
        GenTemplateSpider._write_spider(file_name, code, "next_page_spider")

    @staticmethod
    def gen_index_spider_create_page(
        file_name,
        province="",
        city="",
        county="",
        park="",
        source="",
        url="",
        list_a_xpath=None,
        next_page_href_xpath="//a[contains(text(), '下一页')]",
        total_page_xpath_reg=r"createPageHTML\('page_div',[\"\']?(\d+)[\"\']?,",
        end_tail="list",
    ):
        """Generate a spider for sites paginated by the createPageHTML() JS helper."""
        if not list_a_xpath:
            list_a_xpath = ["//ul[contains(@class, 'list')]//li//a"]
        code = policy_index_spider_create_page_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            name=file_name,
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
            next_page_href_xpath=next_page_href_xpath,
            list_a_xpath=list_a_xpath,
            total_page_xpath_reg=total_page_xpath_reg,
            end_tail=end_tail,
        )
        GenTemplateSpider._write_spider(file_name, code, "index_spider_create_page")

    @staticmethod
    def gen_dataproxy_spider(file_name, province="", city="", county="", park="", source="", url=""):
        """Generate a spider for dataproxy-style JSON list endpoints."""
        code = policy_dataproxy_spider_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
        )
        # Label fixed: previously mislogged as "index_spider".
        GenTemplateSpider._write_spider(file_name, code, "dataproxy_spider")

    @staticmethod
    def gen_page_datasource_spider(file_name, province="", city="", county="", park="", source="", url=""):
        """Generate a spider for page-datasource style sites."""
        code = policy_policy_page_datasource_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
        )
        GenTemplateSpider._write_spider(file_name, code, "page_datasource")

    @staticmethod
    def gen_list_spider(file_name, province="", city="", county="", park="", source="", url=""):
        """Generate a spider for plain HTML list pages."""
        code = policy_list_html_spider_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
        )
        # Label fixed: previously mislogged as "page_jsonp_template".
        GenTemplateSpider._write_spider(file_name, code, "list_html_spider")

    @staticmethod
    def gen_cms_spider(file_name, province="", city="", county="", park="", source="", url=""):
        """Generate a spider for CMS-backed sites."""
        code = policy_cms_spider_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
        )
        # Label fixed: previously mislogged as "index_spider".
        GenTemplateSpider._write_spider(file_name, code, "cms_spider")

    @staticmethod
    def gen_gkmlpt_spider(
        file_name,
        province="",
        city="",
        county="",
        park="",
        source="",
        url="",
        next_page_href_xpath="//a[@class='last']",  # kept for signature compat; unused by this template
        list_a_xpath="//*[@class='newsList']//a",  # kept for signature compat; unused by this template
    ):
        """Generate a spider for /gkmlpt open-government catalogue platforms.

        Raises ``IndexError`` if *url* does not contain ``.../gkmlpt`` and a
        ``#<digits>`` fragment — the regexes below assume both are present.
        """
        base_host = re.findall(r"(.*?/gkmlpt)", url)[0]
        base_index = re.findall(r"#(\d+)", url)[0]
        code = policy_gkmlpt_spider_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
            post_url=f"{base_host}/api/all/{base_index}",
        )
        GenTemplateSpider._write_spider(file_name, code, "gkmlpt_spider")

    @staticmethod
    def gen_search_class_sql_spider(file_name, province="", city="", county="", park="", source="", url=""):
        """Generate a spider for search-class SQL style endpoints."""
        code = policy_search_class_sql_spider_template.render(
            function_name=GenTemplateSpider._function_name(file_name),
            province=province,
            city=city,
            county=county,
            park=park,
            source=source,
            url=url,
        )
        GenTemplateSpider._write_spider(file_name, code, "search_class_sql")


if __name__ == "__main__":
    # Load the crawl seed list and normalise the Chinese column headers to
    # the English keyword names run_normal expects.
    df = pd.read_csv(Path(__file__).parent / "cache.csv")
    df = df.rename(
        columns={"来源网站": "source", "网址": "url", "省份": "province", "城市": "city", "区县": "county", "文件名称": "file_name"}
    )
    # fillna replaces NaN with "" directly; the previous np.NAN alias was
    # removed in NumPy 2.0 and would raise AttributeError there.
    df = df.fillna("")
    for record in df.to_dict("records"):
        params = {
            **record,
            "next_page_href_xpath": "//a[@class='last']",
            "list_a_xpath": "//*[contains(@class, 'list')]//a",
        }
        GenTemplateSpider().run_normal(**params)
