# -*- coding: utf-8 -*-
# @Time : 2020/11/23 11:22
# @Author : zhangxing
# @File : jiangsushenggonglushuilujiansheshichangxinyongxinxifuwuxitong.py
import scrapy
from spidertools.utils.time_utils import get_current_date

from commonresources.spider_items.base_item import convert_dict
from commonresources.spider_items.jiangsu.items import JiangSuShengGongLuShuiLuJianSheShiChangXinYongXinXiFuWuXiTongItem


class JiangSuShengGongLuShuiLuJianSheShiChangXinYongXinXiFuWuXiTongSpider(scrapy.Spider):
    """Spider for the Jiangsu Provincial Highway & Waterway Construction
    Market Credit Information Service System
    (江苏省公路水路建设市场信用信息服务系统).

    Entry page: http://218.2.208.148:8084/JDPT/fileMain/fileMainInfo/publicshow?type=1

    Two announcement categories are crawled:

    * tender announcements ("招标公告") — POSTed to the ``getList`` JSON
      endpoint, one request per (project type, city) pair read from the
      search form's <select> options;
    * award announcements ("中标公告") — POSTed to the ``query`` JSON
      endpoint, one request per city.

    Both flows page through the JSON lists in :meth:`handle_response` and
    fetch each detail page in :meth:`parse_item`.
    """
    name = 'JiangSuShengGongLuShuiLuJianSheShiChangXinYongXinXiFuWuXiTong'
    name_zh = '江苏省公路水路建设市场信用信息服务系统'
    province = "江苏"
    city = ''
    start_urls = ['http://218.2.208.144:8094/EBTS/publish/announcement/paglist1?type=1']

    def __init__(self, full_dose=False, not_full_type=False, **kwargs):
        """
        :param full_dose: truthy -> crawl history (tender flow keeps rows
            newer than 2015-12-31); falsy -> only rows released today.
            NOTE(review): ``scrapy crawl -a full_dose=False`` delivers the
            *string* ``'False'``, which is truthy — verify how callers pass
            this flag.
        :param not_full_type: stored but never read inside this spider;
            kept for interface compatibility.
        :param kwargs: forwarded to :class:`scrapy.Spider` so standard
            ``-a name=value`` spider arguments no longer raise TypeError.
        """
        self.browser_cookie = {}
        self.convert_dict = convert_dict
        self.full_dose = full_dose
        self.not_full_type = not_full_type
        super().__init__(**kwargs)

    def parse(self, response):
        """Read the search form's <select> options and fan out the list
        requests.

        The loops start at index 1 (project type) / index 2 (area),
        skipping the leading option(s) — presumably "all"-style
        placeholders in the form; confirm against the live page.
        """
        objs_city_type = response.xpath('//select[@id="area"]/option')
        construction_type = '招标公告'
        objs_info_type = response.xpath('//select[@id="tender_project_type"]/option')
        for i in range(1, len(objs_info_type)):
            value = objs_info_type[i].xpath('./@value').extract_first()
            info_type = objs_info_type[i].xpath('./text()').extract_first()
            for j in range(2, len(objs_city_type)):
                city_value = objs_city_type[j].xpath('./@value').extract_first()
                project_city = objs_city_type[j].xpath('./text()').extract_first()
                formdata = self.fake_data(value, city_value)
                headers = self.fake_headers()
                yield scrapy.FormRequest(
                    url='http://218.2.208.144:8094/EBTS/publish/announcement/getList?placard_type=1',
                    formdata=formdata,
                    callback=self.handle_response,
                    dont_filter=True,
                    headers=headers,
                    meta={
                        'page_count': -1,  # -1 => total page count not known yet
                        'page': 1,
                        'construction_type': construction_type,
                        'need_break': False,
                        'project_city': project_city,
                        'city_value': city_value,
                        'value': value,
                        'info_type': info_type,
                    },
                )
        construction_type = '中标公告'
        for j in range(2, len(objs_city_type)):
            city_value = objs_city_type[j].xpath('./@value').extract_first()
            project_city = objs_city_type[j].xpath('./text()').extract_first()
            yield scrapy.FormRequest(
                url='http://218.2.208.144:8094/EBTS/publish/announcement/query',
                formdata=self.fake_data_type_second(city_value, 1),
                callback=self.handle_response,
                dont_filter=True,
                headers={
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3823.400 QQBrowser/10.7.4307.400"
                },
                meta={
                    'page_count': -1,
                    'page': 1,
                    'construction_type': construction_type,
                    'need_break': False,
                    'project_city': project_city,  # display name, e.g. "南京市"
                },
            )

    def fake_data_type_second(self, area, page):
        """Form data for the award-announcement ("中标公告") ``query``
        endpoint.

        :param area: area identifier; the literal '南京' is special-cased
            to code '001' (mapping kept from the original author).
        :param page: 1-based page number; the endpoint serves 10 rows/page.
        """
        return {
            "message": "",
            "searchFlgMenu": "",
            "menuType": "",
            "enterpriseType": "",
            "zb_placard_name": "",
            "tender_name": "",
            "project_name": "",
            "bid_unit_name": "",
            "zb_unit_id": "",
            "agent_unit_name": "",
            "area": f"{area if area != '南京' else '001'}",
            "page": f"{page}",
            "rows": "10",
        }

    def fake_headers(self):
        """AJAX-style POST headers for the tender-announcement endpoint."""
        return {
            "Accept": "application/json, text/javascript, */*; q=0.01",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9",
            "Connection": "keep-alive",
            "Content-Type": "application/x-www-form-urlencoded",
            "Host": "218.2.208.144:8094",
            "Origin": "http://218.2.208.144:8094",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36",
            "X-Requested-With": "XMLHttpRequest",
        }

    def fake_data(self, info_type, project_city, page=1):
        """Form data for the tender-announcement ("招标公告") ``getList``
        endpoint.

        :param info_type: value of a ``tender_project_type`` <option>.
        :param project_city: value of an ``area`` <option>.
        :param page: 1-based page number; the endpoint serves 10 rows/page.
        """
        return {
            "placard_type": "1",
            "message": "",
            "submitForm": "",
            "placard_name": "",
            "tender_project_type": f"{info_type}",
            "area": f"{project_city}",
            "page": f"{page}",
            "rows": "10",
        }

    def handle_response(self, response):
        """Parse one JSON list page, yield a detail request per wanted row,
        and schedule the next page until ``page_count`` is exhausted or a
        row falls outside the wanted date range (``need_break``).
        """
        if response.meta['construction_type'] == "招标公告":
            jsons = response.json()
            if response.meta['page_count'] == -1:
                # First page of this combination: derive total page count.
                # `or 0` guards int() against a missing/empty total.
                total = int(jsons['total'] or 0)
                if not total:
                    response.meta['need_break'] = True
                # ceil(total / 10); the previous `total // 10 + 1` requested
                # one extra, empty page whenever total was a multiple of 10.
                response.meta['page_count'] = (total + 9) // 10
            objs = jsons['rows']
            if not response.meta['need_break']:
                for row in objs:
                    item = dict()
                    item['release_time'] = row['placard_send_date']
                    if not self.full_dose and item['release_time'] != get_current_date():
                        # Incremental mode: stop paging once we leave today.
                        response.meta['need_break'] = True
                    elif item['release_time'] < "2015-12-31":
                        # ISO "YYYY-MM-DD" strings compare correctly
                        # lexicographically; nothing older than end of 2015.
                        response.meta['need_break'] = True
                    else:
                        item['announcement_title'] = row['placard_name']
                        # IDs containing '-' are served by the legacy host
                        # 218.2.208.148:9092 via its JSON API; others by the
                        # current 218.2.208.144:8094 application.
                        if "-" in row['placard_id']:
                            item['first_url'] = ("http://218.2.208.148:9092/OP/Bidding/BiddingNoticeAdditionalView.aspx?bnid="
                                                 + row['placard_id'])
                            item['origin_url'] = "http://218.2.208.148:9092/api/BiddingNotice/GetByKeys?BNID=" + row['placard_id']
                        else:
                            item['origin_url'] = f"http://218.2.208.144:8094/EBTS/publish/announcement/doEdit?proId={row['placard_id']}"
                        item['construction_type'] = response.meta['construction_type']
                        item['project_city'] = response.meta['project_city']
                        item['info_type'] = response.meta['info_type']
                        yield scrapy.Request(url=item['origin_url'],
                                             callback=self.parse_item,
                                             dont_filter=True,
                                             headers={
                                                     "Host": "218.2.208.148:9092",
                                                     "Accept-Encoding": "gzip, deflate",
                                                     "Accept": "*/*",
                                                     "Connection": "keep-alive",
                                                     "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36 Edg/86.0.622.69",
                                                 },
                                             meta=item
                                             )

            if not response.meta['need_break']:
                page = response.meta['page']
                page_count = response.meta['page_count']
                if page < page_count:
                    page += 1
                    yield scrapy.FormRequest(
                        url='http://218.2.208.144:8094/EBTS/publish/announcement/getList?placard_type=1',
                        formdata=self.fake_data(response.meta['value'], response.meta['city_value'], page),
                        callback=self.handle_response,
                        dont_filter=True,
                        headers=self.fake_headers(),
                        meta={
                            'page_count': page_count,
                            'page': page,
                            'construction_type': response.meta['construction_type'],
                            'need_break': False,
                            'project_city': response.meta['project_city'],
                            'city_value': response.meta['city_value'],
                            'value': response.meta['value'],
                            'info_type': response.meta['info_type'],
                        },
                    )

        elif response.meta['construction_type'] == "中标公告":
            jsons = response.json()
            if response.meta['page_count'] == -1:
                # Same page-count derivation (and off-by-one fix) as the
                # tender branch above.
                total = int(jsons['total'] or 0)
                if not total:
                    response.meta['need_break'] = True
                response.meta['page_count'] = (total + 9) // 10
            objs = jsons['rows']
            if not response.meta['need_break']:
                for row in objs:
                    item = dict()
                    if row['is_new_tuisong'] == '1':  # original author marked this "关键点" (key point);
                        continue                      # presumably rows already pushed elsewhere — confirm
                    # zb_placard_send_date carries a timestamp; keep the
                    # "YYYY-MM-DD" prefix only.
                    item['release_time'] = row['zb_placard_send_date'][0:10]
                    if not self.full_dose and item['release_time'] != get_current_date():
                        # Incremental mode: stop paging once we leave today.
                        response.meta['need_break'] = True
                    else:
                        item['announcement_title'] = row['zb_placard_name']
                        # Same legacy-host split as the tender branch.
                        if "-" in row['zb_placard_id']:
                            item['first_url'] = ("http://218.2.208.148:9092/OP/WinBid/BidWinShowEdit.aspx?action=view&BWSID="
                                                 + row['zb_placard_id'])
                            item['origin_url'] = "http://218.2.208.148:9092/api/BidWinShow/GetByKeys?BWSID=" + row['zb_placard_id']
                        else:
                            item['origin_url'] = f"http://218.2.208.144:8094/EBTS/publish/announcement/edit?str={row['zb_placard_id']},{row['zb_placard_flag']}"
                        item['construction_type'] = response.meta['construction_type']
                        item['project_city'] = response.meta['project_city']
                        yield scrapy.Request(url=item['origin_url'],
                                             callback=self.parse_item,
                                             dont_filter=True,
                                             headers={
                                                 "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36",
                                             },
                                             meta=item)

                if not response.meta['need_break']:
                    page = response.meta['page']
                    page_count = response.meta['page_count']
                    project_city = response.meta['project_city']
                    if page < page_count:
                        page += 1
                        yield scrapy.FormRequest(
                            url='http://218.2.208.144:8094/EBTS/publish/announcement/query',
                            # NOTE(review): page 1 (built in parse()) sends the
                            # area option *value*, while follow-up pages send
                            # the display name minus its trailing "市" —
                            # confirm the endpoint accepts both forms.
                            formdata=self.fake_data_type_second(project_city[0:-1], page),
                            callback=self.handle_response,
                            dont_filter=True,
                            headers={
                                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3823.400 QQBrowser/10.7.4307.400"
                            },
                            meta={
                                'page_count': page_count,
                                'page': page,
                                'construction_type': response.meta['construction_type'],
                                'need_break': False,
                                'project_city': project_city,
                            },
                        )

    def parse_item(self, response):
        """Wrap a detail-page response into the stored item.

        ``first_url`` and ``info_type`` are optional: first_url is only set
        for legacy-host rows, info_type only by the tender flow.
        """
        item = JiangSuShengGongLuShuiLuJianSheShiChangXinYongXinXiFuWuXiTongItem()
        item['html'] = response.text
        if "first_url" in response.meta:
            item["first_url"] = response.meta['first_url']
        item["announcement_title"] = response.meta['announcement_title']
        item["release_time"] = response.meta["release_time"]
        item['origin_url'] = response.meta['origin_url']
        item["construction_type"] = response.meta['construction_type']
        if "info_type" in response.meta:
            item['info_type'] = response.meta['info_type']
        item['is_parsed'] = 0  # presumably flags the raw HTML as not yet parsed downstream
        item['source_type'] = self.name_zh
        item['province'] = self.province
        item['project_city'] = response.meta['project_city']
        yield item
