import json
import re
import time

import requests
import scrapy
from spidertools.utils.time_utils import get_current_date

from commonresources.spider_items.national.items import ZhongGuoZhaoBiaoTouBiaoFuWuPingTaiItem
from commonresources.spiders.basespider import BaseSpider


class ZhongGuoZhaoBiaoTouBiaoFuWuPingTaiSpider(BaseSpider):
    """Spider for the China Tendering and Bidding Public Service Platform.

    Home page:        http://www.cebpubservice.com/index.shtml
    Transaction page: http://www.cebpubservice.com/ctpsp_iiss/searchbusinesstypebeforedooraction/getSearch.do#
    """
    name = 'ZhongGuoZhaoBiaoTouBiaoFuWuPingTai'
    name_zh = "中国招标投标服务平台"
    province = "national"
    start_urls = ["http://www.cebpubservice.com/ctpsp_iiss/searchbusinesstypebeforedooraction/getSearch.do"]

    def __init__(self, full_dose=False):
        """Initialize crawl-wide accumulators, then defer to BaseSpider.

        :param full_dose: truthy -> full crawl (one entry point per platform
            per announcement type); falsy -> incremental crawl of "今日"
            (today's) records only. Stored by BaseSpider — TODO confirm.
        """
        # Running total of record counts reported by all platforms.
        self.total_count = 0
        # Announcement-type tab labels scraped from the listing page;
        # a hard-coded fallback list is used when this stays empty.
        self.announcement_type_list = []
        # platformCode -> platformName, collected from the portal API.
        self.platform = {}
        super().__init__(full_dose)

    def parse(self, response):
        """Collect the announcement-type tab labels from the listing page.

        Default callback; fills ``self.announcement_type_list`` with the
        text of each tab under ``ul#myTab3``.
        """
        for type_node in response.xpath('//ul[@id="myTab3"]/li/p'):
            self.announcement_type_list.append(type_node.xpath('./text()').extract_first())

    def start_requests(self):
        """Kick off the crawl by querying the portal-name API.

        The response (a JSON list of trading platforms) is handled by
        :meth:`handle_response`.
        """
        yield scrapy.FormRequest(
            url="http://www.cebpubservice.com/ctpsp_iiss/searchalltjaction/searchPortalNmae.do",
            callback=self.handle_response,
            headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                                   "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36"})

    def refresh_cookie(self):
        """Fetch a fresh session from the search page.

        :returns: ``(JSESSIONID, acw_tc)`` cookie values.
        :raises ValueError: if the server does not hand out both cookies
            (previously a bare IndexError from regex-scanning the raw
            ``Set-Cookie`` header, which `requests` folds into a single
            comma-joined string when multiple cookies are set).
        """
        response = requests.get(
            url="http://www.cebpubservice.com/ctpsp_iiss/searchbusinesstypebeforedooraction/getSearch.do",
            headers={
                "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
                "Accept-Encoding": "gzip, deflate",
                "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
                "Host": "www.cebpubservice.com",
                "Proxy-Connection": "keep-alive",
                "Upgrade-Insecure-Requests": "1",
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36 Edg/86.0.622.69",
            },
            # Without a timeout a stuck upstream would hang the whole crawl.
            timeout=30,
        )
        # Let requests parse the Set-Cookie headers instead of regexing
        # the raw header string.
        jsessionid = response.cookies.get("JSESSIONID")
        acw_tc = response.cookies.get("acw_tc")
        if jsessionid is None or acw_tc is None:
            raise ValueError(
                f"expected JSESSIONID and acw_tc cookies, got: {list(response.cookies.keys())}")
        return jsessionid, acw_tc

    def handle_response(self, response):
        """Fan out search requests, one per platform/announcement type.

        Parses the platform list returned by ``searchPortalNmae.do`` and
        yields POST requests against ``getStringMethod.do``:

        * full crawl (``self.full_dose``): one request per platform with
          time filter "全部" (all);
        * incremental crawl: a single request per announcement type with
          an empty platform code and time filter "今日" (today).
        """
        platforms = json.loads(response.text)['object']['listNationPublic']
        if not self.announcement_type_list:
            # Fallback when parse() never ran (start_requests bypasses it).
            self.announcement_type_list = ["招标项目", "招标公告", "开标记录", "评标公示", "中标公告", "签约履行"]
        for announcement_type in self.announcement_type_list:
            # Only the "招标项目" (tender project) type is crawled for now.
            if announcement_type != "招标项目":
                continue

            JSESSIONID, acw_tc = self.refresh_cookie()
            headers = self.fake_post_request_headers(JSESSIONID, acw_tc)
            for platform in platforms:
                platform_code = platform['platformCode']
                platform_name = platform['platformName']
                self.platform[platform_code] = platform_name
                self.total_count += int(platform['platformcount'])
                if self.full_dose:
                    # Full crawl: wide start, 200+ entry points (43 platforms
                    # x 6 types) — suited to a one-off full dump.
                    formdata = self.fake_post_form_data(platform_code, announcement_type, "全部", 1)
                    yield scrapy.FormRequest(
                        url="http://www.cebpubservice.com/ctpsp_iiss/searchbusinesstypebeforedooraction/getStringMethod.do",
                        headers=headers,
                        formdata=formdata,
                        callback=self.handle_detail_page,
                        meta={
                            "platform_code": platform_code,
                            "platform_name": platform_name,
                            "announcement_type": announcement_type,
                            "need_break": False,
                            "page": 1,
                        }
                    )
            # Report the grand total once, instead of comparing every
            # platform against platforms[-1] inside the loop.
            print(f"============所有平台总数据量：{self.total_count}==============")
            if not self.full_dose:
                # Incremental crawl: 6 entry points — suited to a scheduled job.
                formdata = self.fake_post_form_data("", announcement_type, "今日", 1)
                yield scrapy.FormRequest(
                    url="http://www.cebpubservice.com/ctpsp_iiss/searchbusinesstypebeforedooraction/getStringMethod.do",
                    headers=headers,
                    formdata=formdata,
                    callback=self.handle_detail_page,
                    meta={
                        "platform_code": "",
                        "platform_name": "",
                        "announcement_type": announcement_type,
                        "need_break": False,
                        "page": 1,
                    }
                )

    def fake_post_request_headers(self, JSESSIONID, acw_tc):
        """Build browser-like headers for the search POST.

        :param JSESSIONID: session cookie from :meth:`refresh_cookie`.
        :param acw_tc: anti-crawl cookie from :meth:`refresh_cookie`.
        :returns: header dict carrying both cookies plus fabricated
            Hm_* (Baidu analytics) timestamps.
        """
        return {
            "Accept": "application/json, text/javascript, */*; q=0.01",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9",
            "Connection": "keep-alive",
            "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
            "Cookie": f"Hm_lvt_ef2114bed21175425a21bb8a1e40ebdf={int(time.time())}; "
                      f"Hm_lpvt_ef2114bed21175425a21bb8a1e40ebdf={int(time.time())};"
                      f" JSESSIONID={JSESSIONID}; acw_tc={acw_tc}; cmsurl=/../../index.shtml",
            "Host": "www.cebpubservice.com",
            "Origin": "http://www.cebpubservice.com",
            "Referer": "http://www.cebpubservice.com/ctpsp_iiss/searchbusinesstypebeforedooraction/getSearch.do",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                          "Chrome/86.0.4240.75 Safari/537.36",
            "X-Requested-With": "XMLHttpRequest",
        }

    def fake_post_form_data(self, transactionPlatfCode, businessType, bulletinIssnTime, page):
        """Build the form payload for ``getStringMethod.do``.

        :param transactionPlatfCode: platform code, or "" for all platforms.
        :param businessType: announcement type label, e.g. "招标项目".
        :param bulletinIssnTime: time filter label ("全部", "今日", ...).
        :param page: 1-based page number.
        :returns: form dict with 15 rows per page.
        """
        return {
            "searchName": "",
            "searchArea": "",
            "searchIndustry": "",
            "centerPlat": "" if not transactionPlatfCode else 'transactionPlatfCode:' + transactionPlatfCode,
            "businessType": f"{businessType}",
            "searchTimeStart": "",
            "searchTimeStop": "",
            "timeTypeParam": "",
            "bulletinIssnTime": f"{bulletinIssnTime}",
            "bulletinIssnTimeStart": "",
            "bulletinIssnTimeStop": "",
            "pageNo": f"{page}",
            "row": "15",
        }

    def handle_detail_page(self, response):
        """Placeholder result handler: dumps the raw response body.

        TODO: parse the JSON result pages into
        ZhongGuoZhaoBiaoTouBiaoFuWuPingTaiItem and handle pagination
        via response.meta ("page", "need_break").
        """
        print(response.text)



