import re

import feapder
from tools import get_crawlFlag, parse_money_toFloat, db
from feapder.utils.log import log
from items.gdgpo_item import GdgpoItem

# Site root; relative "pageurl" values from the list API are joined onto this.
base_url = "https://gdgpo.czt.gd.gov.cn"
# List-API endpoint template. The single {} placeholder is filled with a
# "&title=<url-encoded keyword>" fragment from the word dicts below.
# Fixed filters: noticeType=00102, region 440001, notices since 2021-01-01.
replace_url = 'https://gdgpo.czt.gd.gov.cn/freecms/rest/v1/notice/selectInfoMoreChannel.do?&pageSize=10&noticeType=00102&regionCode=440001&purchaseManner={}&openTenderCode=&purchaser=&agency=&purchaseNature=&operationStartTime=2021-01-01%2000:00:00&operationEndTime=&selectTimeName=noticeTime'
# Each dict maps a URL-encoded "&title=..." query fragment to the
# human-readable search keyword (stored on items as query_keyword).
word1 = {'&title=%E6%8E%92%E6%B0%B4': '排水'}
word2 = {'&title=%E6%8E%92%E6%B1%A1': '排污'}
word3 = {'&title=%E9%9B%A8%E6%B0%B4': '雨水'}
word4 = {'&title=%E7%AE%A1%E9%81%93': '管道'}
word5 = {'&title=%E7%AE%A1%E7%BD%91': '管网'}
word6 = {'&title=%E7%8E%AF%E5%A2%83%E6%B2%BB%E7%90%86': '环境治理'}
word7 = {'&title=%E6%8E%92%E6%B6%9D': '排涝'}
word8 = {'&title=%E7%8E%AF%E5%A2%83%E6%95%B4%E6%B2%BB': '环境整治'}
word9 = {'&title=%E9%9D%9E%E5%BC%80%E6%8C%96': '非开挖'}
# All keyword dicts crawled by SpiderTest.start_requests.
keyword_list = [word1, word2, word3, word4, word5, word6, word7, word8, word9]

# Destination table for upserted notice rows.
tb_name = "gdgpo"

# Crawl-batch identifier stamped on every stored row.
CRAWL_FLAG = get_crawlFlag()


def init_data_from_web(data_obj, item):
    """Populate *data_obj* from a raw notice record returned by the list API.

    ``item`` is one entry of the API's ``data`` array; ``data_obj`` is the
    GdgpoItem being filled in. The crawl flag is stamped from the module-level
    CRAWL_FLAG.
    """
    data_obj.crawl_flag = CRAWL_FLAG
    # Straight copies: destination attribute -> source JSON key.
    field_map = (
        ("title", "title"),
        ("project_manager", "agentManageName"),
        ("project_id", "openTenderCode"),
        ("purchase_id", "planCodes"),
        ("region", "regionName"),
        ("purchaser", "purchaser"),
        ("purchase_type", "catalogueNameList"),
        ("publish_time_str", "noticeTime"),
        ("publish_time", "noticeTime"),
        ("agency", "agency"),
    )
    for attr, key in field_map:
        setattr(data_obj, attr, item[key])
    # Derived fields: absolute page URL and numeric budget.
    data_obj.page_url = base_url + item["pageurl"]
    data_obj.budget = parse_money_toFloat(item["budget"])

def init_data_from_db(data_obj, item):
    """Populate *data_obj* from a previously stored ``gdgpo`` row.

    ``item`` is a row dict whose column names match the item attributes
    one-to-one, so the copy is table-driven.
    """
    data_obj.crawl_flag = CRAWL_FLAG
    for column in ("title", "page_url", "project_manager", "project_id",
                   "purchase_id", "region", "budget", "purchaser",
                   "purchase_type", "publish_time_str", "publish_time",
                   "agency"):
        setattr(data_obj, column, item[column])


class SpiderTest(feapder.AirSpider):
    """Crawl award notices from the Guangdong government procurement site.

    Flow: one list request per search keyword -> upsert each notice into the
    ``gdgpo`` table -> follow the notice's detail page to fill in supplier
    and deal-amount fields.
    """

    @staticmethod
    def _sql_quote(value):
        """Minimally escape *value* for embedding in a SQL string literal.

        Doubling single quotes prevents an externally supplied id containing
        "'" from breaking out of the literal (the original code concatenated
        the raw value straight into the SQL).
        NOTE(review): prefer parameterized queries if tools.db supports them.
        """
        return value.replace("'", "''")

    def start_requests(self):
        # One list request per URL-encoded keyword fragment; carry the
        # human-readable keyword on the request for later storage.
        for word_dict in keyword_list:
            for encoded, keyword in word_dict.items():
                yield feapder.Request(replace_url.format(encoded),
                                      query_keyword=keyword)

    def parse(self, request, response):
        """Upsert every notice from one list-API page, then follow details."""
        data_json_entire = response.json.get("data") or []  # tolerate missing "data"
        for item in data_json_entire:
            log.info(item)
            title = item["title"]
            url = base_url + item["pageurl"]
            # Fresh item per notice; the original reused one object across
            # the whole page, risking field carry-over between rows.
            data_obj = GdgpoItem(query_keyword=request.query_keyword)
            init_data_from_web(data_obj, item)
            pid = self._sql_quote(data_obj.purchase_id)
            check_data = db.find(
                sql="select * from gdgpo where purchase_id = '" + pid + "'",
                to_json=True)
            if not check_data:
                op_res = db.add_smart(table=tb_name, data=data_obj.to_dict)
                op_type = 1
            else:
                op_res = db.update_smart(
                    table=tb_name, data=data_obj.to_UpdateItem().to_dict,
                    condition="purchase_id='" + pid + "'")
                op_type = 2
            if op_res:
                if op_type == 1:
                    log.info("#########first insert over##########")
                elif op_type == 2:
                    log.info("#########first update over##########")
            else:
                log.info("!!!!!!!!! update fail !!!!!!!!")

            yield feapder.Request(url, callback=self.parse_detail, title=title,
                                  purchase_id=data_obj.purchase_id,
                                  query_keyword=request.query_keyword)

    def parse_detail(self, request, response):
        """Parse a notice detail page and update the stored row.

        Reads supplier name/address and the deal amount from the first data
        row of the page's table, plus the project name when an "项目名称"
        heading is present.
        """
        response.encoding_errors = "ignore"
        url = request.url
        title = request.title
        purchase_id = request.purchase_id
        rows = db.find(
            sql="select * from gdgpo where purchase_id = '"
                + self._sql_quote(purchase_id) + "'",
            to_json=True)
        if not rows:
            # Row vanished between list parse and detail parse; the original
            # code raised IndexError here. Nothing to update.
            log.info("!!!!!!!!! update fail !!!!!!!!")
            return
        data_obj = GdgpoItem(query_keyword=request.query_keyword)
        init_data_from_db(data_obj, rows[0])
        log.debug(data_obj)
        log.info("req url: " + url)
        log.info("req title: " + title)
        bs = response.bs4()
        # First data row of the results table: supplier, address, amount.
        cells = bs.find("table").find_all("tr")[1].find_all("td")
        supplier_name = cells[0].get_text().strip()
        supplier_address = cells[1].get_text().strip()
        deal_amount = parse_money_toFloat(cells[2].get_text().strip())
        log.info("供应商(中标单位): " + supplier_name)
        log.info("供应商地址: " + supplier_address)
        log.info("中标金额: " + str(deal_amount))
        data_obj.supplier_name = supplier_name
        data_obj.supplier_address = supplier_address
        data_obj.deal_amount = deal_amount
        project_name = bs.find("h4", string=re.compile("项目名称"))
        if project_name:
            text = project_name.get_text()
            # Keep everything after the full-width colon.
            data_obj.project_name = text[text.find("：") + 1:]
        update_res = db.update_smart(
            table=tb_name, data=data_obj.to_UpdateItem().to_dict,
            condition="purchase_id='"
                      + self._sql_quote(data_obj.purchase_id) + "'")
        if update_res:
            log.info("#########update over#########")
        else:
            log.info("!!!!!!!!! update fail !!!!!!!!")
        log.debug(data_obj.to_dict)


if __name__ == "__main__":
    # Single-threaded run; pass thread_count=N to AirSpider for concurrency.
    SpiderTest().start()
