# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


import hashlib


# NOTE: leftover from the Scrapy project template — this comment originally
# annotated the (since removed) ``from itemadapter import ItemAdapter`` import.


class LandchinaCrawlspiderPipeline:
    """Default no-op pipeline: forwards every item unchanged."""

    def process_item(self, item, spider):
        """Return *item* untouched so downstream pipelines receive it."""
        return item


class LandchinaCrawlspiderWriteToCsvPipeline:
    """Append scraped land records to a CSV file, de-duplicating via Redis.

    Expects the spider to expose:
      * ``spider.red`` -- a Redis client; ``sadd`` on the set
        ``land_info_id`` is used for de-duplication (returns 0 for a
        member that already exists).
      * ``spider.fd``  -- an open, writable+readable text file for output.
      * ``spider.mysql_conn`` -- only touched on error (see NOTE below).
    """

    # CSV row layout shared by the header and the data rows.
    _ROW_FMT = "%s,%s,%s,%s,%s,%s,%s\r\n"
    # Header row, written exactly once when the output file is empty.
    _HEADER = ("行政区", "项目位置", "面积(公顷)", "土地用途",
               "成交价格(万元)", "合同签订日期", "电子监管号")

    def process_item(self, item, spider):
        """Write *item* as a CSV row unless it was already seen.

        Returns the item unchanged so further pipelines can run.
        """
        fields = (item['division'], item['location'], item['area'],
                  item['purpose'], item['price'], item['sign_date'],
                  item['supervision_number'])
        # Fingerprint the record so repeat crawls do not duplicate rows.
        land_info_id = hashlib.sha256(''.join(fields).encode()).hexdigest()
        try:
            is_new = spider.red.sadd('land_info_id', land_info_id)
            if is_new:
                print('有新数据，爬取ing')
                # BUG FIX: the original wrote EITHER the header OR the data
                # row, so an item arriving at an empty file was dropped
                # (only the header landed), and later rows were written at
                # the position left after readline(), clobbering content.
                # Now: write the header once if the file is empty, then
                # always append the data row at the end of the file.
                spider.fd.seek(0)
                if not spider.fd.readline():
                    spider.fd.write(self._ROW_FMT % self._HEADER)
                spider.fd.seek(0, 2)  # seek to end -> append
                spider.fd.write(self._ROW_FMT % fields)
            else:
                print('数据无更新！')
        except Exception as e:
            # NOTE(review): rolling back the MySQL connection from the CSV
            # pipeline looks misplaced, but is kept for behavioral parity.
            spider.mysql_conn.rollback()
            print(e)
        return item


class LandchinaCrawlspiderMysqlPipeline:
    """Insert scraped land records into MySQL, de-duplicating via Redis.

    Expects the spider to expose:
      * ``spider.red``        -- Redis client (dedup set ``land_info_id``)
      * ``spider.cur``        -- DB-API cursor
      * ``spider.mysql_conn`` -- DB connection (commit/rollback/close)
      * ``spider.fd``         -- file handle closed in :meth:`close_spider`
    """

    # SECURITY FIX: the original interpolated scraped (untrusted) text
    # straight into the SQL string with %-formatting inside quotes, which
    # breaks on embedded quotes and is an SQL injection vector.  Use the
    # driver's parameter placeholders instead.
    _INSERT_SQL = ('insert into land_detail(division,location,area,purpose,'
                   'price,sign_date,supervision_number) '
                   'values(%s,%s,%s,%s,%s,%s,%s);')

    def process_item(self, item, spider):
        """Insert *item* unless its fingerprint is already in Redis.

        Commits on success, rolls back on any error; always returns the
        item so further pipelines can run.
        """
        values = (item['division'], item['location'], item['area'],
                  item['purpose'], item['price'], item['sign_date'],
                  item['supervision_number'])
        # Fingerprint the record so repeat crawls do not duplicate rows.
        land_info_id = hashlib.sha256(''.join(values).encode()).hexdigest()
        try:
            is_new = spider.red.sadd('land_info_id', land_info_id)
            if is_new:
                print('有新数据，爬取ing')
                spider.cur.execute(self._INSERT_SQL, values)
                spider.mysql_conn.commit()
            else:
                print('数据无更新！')
        except Exception as e:
            spider.mysql_conn.rollback()
            print(e)
        return item

    def close_spider(self, spider):
        """Release Redis, MySQL and file resources when the spider closes."""
        print('closed')
        if spider.red:
            spider.red.close()
        if spider.cur:
            spider.cur.close()
            spider.mysql_conn.close()
        if spider.fd:
            spider.fd.close()
        return
