import logging

from qfang.items import *
from qfang.pipelines.Sql import Sql


# class CrawlUrlPipeLine(object):
#     def process_item(self, item, spider):
#         if isinstance(item, CrawlUrlItem):
#             ret = Sql.select_by_id_date('crawl_url', item['id'], item['crawl_date'])
#             if ret[0] == 1:
#                 print('已存在' + item['url'])
#                 logging.info('已存在' + item['url'])
#                 pass
#             else:
#                 print('开始存储：' + item['url'])
#                 logging.info('开始存储：' + item['url'])
#                 Sql.insert("crawl_url", item)


# class SellingAPTPipeLine(object):
#     def process_item(self, item, spider):
#         if isinstance(item, SellingAPTItem):
#             # ret = Sql.select_by_id_date('selling_apt_record', item['id'], item['crawl_date'])
#             # if ret[0] == 1:
#             #     print('已存在' + item['url'])
#             #     logging.info('已存在' + item['url'])
#             #     pass
#             # else:
#             print('开始存储：')
#             logging.info('开始存储：')
#             Sql.insert("houses_housedata", item)
            # Sql.update_crawl_url_status(1, item['id'], item['crawl_date'])


# class DealAPTPipeLine(object):
#     def process_item(self, item, spider):
#         if isinstance(item, DealAPTItem):
#             ret = Sql.select_by_id_date('deal_apt', item['id'], item['crawl_date'])
#             if ret[0] == 1:
#                 print('已存在' + item['url'])
#                 logging.info('已存在' + item['url'])
#                 pass
#             else:
#                 print('开始存储：' + item['url'])
#                 logging.info('开始存储：' + item['url'])
#                 Sql.insert("selling_apt_record", item)


import pymysql.cursors
from twisted.enterprise import adbapi


# class CrawlUrlPipeline(object):
#     def __init__(self, dbpool):
#         self.dbpool = dbpool
#
#     @classmethod
#     def from_settings(cls, settings):
#         dbparms = dict(
#             host=settings["MYSQL_HOST"],
#             db=settings["MYSQL_DBNAME"],
#             user=settings["MYSQL_USER"],
#             passwd=settings["MYSQL_PASSWORD"],
#             charset='utf8',
#             cursorclass=pymysql.cursors.DictCursor,
#             use_unicode=True,
#         )
#         dbpool = adbapi.ConnectionPool("pymysql", **dbparms)
#
#         return cls(dbpool)
#
#     def process_item(self, item, spider):
#         # 使用twisted将mysql插入变成异步执行
#         # item = copy.deepcopy(item)
#         query = self.dbpool.runInteraction(self.do_insert, item)
#         query.addErrback(self.handle_error)  # 处理异常
#
#         return item
#
#     def handle_error(self, failure):
#         # 处理异步插入的异常
#         print('-----数据库写入失败' + str(failure))
#
#     def do_insert(self, cursor, item):
#         # 执行具体的插入
#         # 根据不同的item 构建不同的sql语句并插入到mysql中
#         ret = Sql.select_by_id_date('crawl_url', item['id'], item['crawl_date'])
#         if ret[0] == 1:
#             logging.info('已存在' + item['url'])
#             pass
#         else:
#             insert_sql = """
#                         insert into crawl_url
#                         (id, crawl_date, crawl_time, source, url, type, rawurl, rawurl2, rawurl3, rawurl4, status, error_count)
#                         VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
#                     """
#             cursor.execute(insert_sql, (
#                 item['id'], item['crawl_date'], item["crawl_time"], item["source"], item["url"], item["type"],
#                 item["rawurl"], item["rawurl2"], item['rawurl3'], item['rawurl4'], item['status'], item['error_count'],))


class SellingAPTPipeLine(object):
    """Scrapy pipeline that inserts SellingAPT items into MySQL asynchronously.

    The blocking pymysql INSERT is executed on twisted's adbapi connection
    pool so it does not stall the crawler's reactor loop.
    """

    def __init__(self, dbpool):
        # adbapi.ConnectionPool shared by every process_item call.
        self.dbpool = dbpool

    @classmethod
    def from_settings(cls, settings):
        """Alternate constructor: build the pool from Scrapy MYSQL_* settings."""
        dbparms = dict(
            host=settings["MYSQL_HOST"],
            db=settings["MYSQL_DBNAME"],
            user=settings["MYSQL_USER"],
            passwd=settings["MYSQL_PASSWORD"],
            charset='utf8',
            cursorclass=pymysql.cursors.DictCursor,
            use_unicode=True,
        )
        dbpool = adbapi.ConnectionPool("pymysql", **dbparms)

        return cls(dbpool)

    def process_item(self, item, spider):
        """Schedule the MySQL insert on the pool; never blocks the reactor.

        Returns the item unchanged so downstream pipelines still receive it.
        """
        # runInteraction runs do_insert(cursor, item) in a pool thread.
        query = self.dbpool.runInteraction(self.do_insert, item)
        query.addErrback(self.handle_error)  # handle insert failures

        return item

    def handle_error(self, failure):
        """Report an asynchronous insert failure.

        Fix: previously the failure was only print()ed, so it never reached
        the crawl log.  Also record it via logging.error (logging is already
        imported at module level), matching the print+logging pattern used
        throughout this file.
        """
        print('-----数据库写入失败' + str(failure))
        logging.error('-----数据库写入失败' + str(failure))

    def do_insert(self, cursor, item):
        """Execute the actual INSERT for one item inside a pooled transaction.

        Uses a parameterized query, so values are escaped by the driver.
        """
        insert_sql = """
                        insert into houses_housedata
                        (website, times, community_name, title_name, region, area, all_price, one_price, community_price, loss)
                        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                    """
        cursor.execute(insert_sql, (
            item['website'], item['times'], item["community_name"], item["title_name"], item["region"], item["area"],
            item["all_price"], item["one_price"], item['community_price'], item['loss']))
