import scrapy

import pymysql
from scrapy.utils.project import get_project_settings
import json
class ChinaFailSpider(scrapy.Spider):
    """Retry spider for the administrative-division crawl.

    Reads URLs previously recorded in the MySQL ``fail_record`` table
    (rows hold the failed URL, a JSON-encoded parent record, and a level
    flag) and re-issues the corresponding requests:

    * level 1 — a county listing page failed; retry via ``parse_three``.
    * level 2 — a city listing page failed; retry via ``parse_two``.
    """

    name = 'china_fail'
    allowed_domains = ['gov.cn']
    start_urls = ['http://www.stats.gov.cn/tjsj/tjbz/tjyqhdmhcxhfdm/2019/index.html']

    def parse(self, response):
        """Load the fail_record table and schedule one retry request per row."""
        settings = get_project_settings()
        db_params = dict(
            host=settings['MYSQL_HOST'],
            user=settings['MYSQL_USER'],
            password=settings['MYSQL_PASSWORD'],
            port=settings['MYSQL_PORT'],
            database=settings['MYSQL_DBNAME'],
            charset=settings['MYSQL_CHARSET'],
            use_unicode=True,
        )
        con = pymysql.connect(**db_params)
        # BUG FIX: `result` was undefined (NameError in the loop below) if the
        # query raised; initialise it so a failed query degrades to a no-op.
        result = ()
        try:
            with con.cursor() as cursor:
                sql = "select * from fail_record"
                cursor.execute(sql)
                result = cursor.fetchall()
        finally:
            con.close()

        for getval in result:
            # Per the stored data the level is almost always 1: in practice
            # only county pages come back with errors.
            if getval[2] == 1:
                yield scrapy.Request(getval[0], callback=self.parse_three,
                                     errback=self.err_http_back_county,
                                     cb_kwargs=dict(parent_city=json.loads(getval[1])))
            elif getval[2] == 2:
                # BUG FIX: parse_two's signature and err_http_back_city both
                # expect the kwarg 'parent_province'; the old 'parent_city'
                # name raised TypeError in the callback and KeyError in the
                # errback.
                yield scrapy.Request(getval[0], callback=self.parse_two,
                                     errback=self.err_http_back_city,
                                     cb_kwargs=dict(parent_province=json.loads(getval[1])))

    # Crawl the city list of a province (retry of a level-2 failure).
    def parse_two(self, response, parent_province):
        """Extract cities under *parent_province* and follow each city link."""
        city_item = []
        try:
            for city in response.css('tr.citytr'):
                city_item_now = [city.css('td a::text')[0].get(), 1,
                                 parent_province[0], city.css('td a::text')[1].get()]
                city_item.append(city_item_now)
                # If the city page errored, its county pages below must also
                # be re-fetched; failures there go through the errback.
                yield scrapy.Request(response.urljoin(city.css('*::attr("href")')[0].get()),
                                     callback=self.parse_three_city,
                                     errback=self.err_http_back_county,
                                     cb_kwargs=dict(parent_city=city_item_now))
            yield {'save_data': city_item, 'table': 'delete_record', 'del_fail_record': response.url}
        except Exception:
            # Best-effort retry: log instead of silently swallowing (the old
            # bare `except:` also caught KeyboardInterrupt/SystemExit).
            self.logger.exception('parse_two failed for %s', response.url)

    # Crawl the county list of a city (reached from parse_two).
    def parse_three_city(self, response, parent_city):
        """Extract counties under *parent_city*; record the URL on failure."""
        county_item = []
        # This page frequently yields no data; we store the failing URL so a
        # separate spider run can retry it later.
        try:
            for county in response.css('tr.countytr'):
                # The first row has no <a> element and needs a different selector.
                if county.xpath('td/a/text()').get() is None:
                    county_brigt = [county.css('td::text')[0].get(), 2, parent_city[0],
                                    county.css('td::text')[1].get()]
                else:
                    county_brigt = [county.css('td a::text')[0].get(), 2, parent_city[0],
                                    county.css('td a::text')[1].get()]
                county_item.append(county_brigt)
            yield {'save_data': county_item}
        except Exception:
            fail_item = [response.url, parent_city, 1]
            yield {'save_data': fail_item, 'table': 'fail'}

    # Crawl the county list of a city (direct retry of a level-1 failure).
    def parse_three(self, response, parent_city):
        """Extract counties under *parent_city* and clear the fail record."""
        county_item = []
        # This page frequently yields no data; on failure the row stays in
        # fail_record for the next retry round.
        try:
            for county in response.css('tr.countytr'):
                # The first row has no <a> element and needs a different selector.
                if county.xpath('td/a/text()').get() is None:
                    county_brigt = [county.css('td::text')[0].get(), 2, parent_city[0],
                                    county.css('td::text')[1].get()]
                else:
                    county_brigt = [county.css('td a::text')[0].get(), 2, parent_city[0],
                                    county.css('td a::text')[1].get()]
                county_item.append(county_brigt)
            yield {'save_data': county_item, 'table': 'delete_record', 'del_fail_record': response.url}
        except Exception:
            # Best-effort retry: log instead of silently swallowing.
            self.logger.exception('parse_three failed for %s', response.url)

    def err_http_back_city(self, failure):
        """Errback for city-level requests: re-record the failed URL as level 2."""
        request = failure.request
        fail_item = [request.url, request.cb_kwargs['parent_province'], 2]
        yield {'save_data': fail_item, 'table': 'fail'}

    def err_http_back_county(self, failure):
        """Errback for county-level requests: re-record the failed URL as level 1."""
        request = failure.request
        fail_item = [request.url, request.cb_kwargs['parent_city'], 1]
        yield {'save_data': fail_item, 'table': 'fail'}