# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html

from pymysql import connect
import time
import random
import datetime, re


class BasePipeline(object):
    """
    Base pipeline class meant to be inherited.

    Opens the database connection when the spider starts, inserts rows
    while the spider runs, and closes the connection when it stops.
    """

    def open_spider(self, spider):
        """
        Connect to MySQL when the spider opens.

        Connection parameters come from the project settings:
        DATABASE_NAME / DATABASE_USER / DATABASE_PASSWORD /
        DATABASE_HOST / DATABASE_PORT.

        :param spider: the running spider (provides ``.settings``)
        :return: None
        """
        settings = spider.settings
        self.conx = connect(
            user=settings['DATABASE_USER'],
            password=settings['DATABASE_PASSWORD'],
            host=settings['DATABASE_HOST'],
            port=settings['DATABASE_PORT'],
            database=settings['DATABASE_NAME'],
            charset='utf8',
        )
        self.cs = self.conx.cursor()

    def data_to_db(self, sql):
        """
        Execute one SQL statement and commit it.

        BUGFIX: roll back the transaction when execution fails so a bad
        row does not leave the connection in an aborted-transaction state
        for every later insert; the original exception is re-raised.

        :param sql: complete SQL statement to execute
        :return: None
        :raises: whatever the driver raised, after rolling back
        """
        try:
            self.cs.execute(sql)
            self.conx.commit()
        except Exception:
            self.conx.rollback()
            raise

    def close_spider(self, spider):
        """
        Release database resources when the spider closes.

        BUGFIX: also close the cursor, which the original leaked; the
        connection is closed even if closing the cursor fails.

        :param spider: the running spider (unused)
        :return: None
        """
        try:
            self.cs.close()
        finally:
            self.conx.close()

class HouseSpiderPipeline(BasePipeline):
    """
    Cleans and normalizes scraped rental-house items and builds the
    INSERT statement for the ``soufang`` table.

    Handles three spiders (``fang``, ``anjuke``, ``58tongcheng``); each
    branch maps its own item layout onto the shared table columns.

    NOTE(review): the SQL is assembled with ``str.format`` from scraped
    text, which is vulnerable to SQL injection (a single quote in any
    field breaks the statement).  Switch to parameterized queries
    (``cursor.execute(sql, params)``) before re-enabling ``data_to_db``.
    """

    def process_item(self, item, spider):
        """
        Normalize one scraped item and build (but currently not execute —
        ``data_to_db`` calls are commented out) the INSERT statement.

        :param item: item dict produced by one of the three spiders
        :param spider: the running spider; branching is done on ``spider.name``
        :return: the item, unmodified, per Scrapy pipeline convention
        """
        if spider.name == 'fang':
            # 1. Title
            title = item['title']

            # 2. Rent type / layout / size / direction.  Raw data looks like
            #    ['\r\n  整租', '3室2厅', '98㎡', '朝南北\r\n  '] and may be
            #    shorter than four elements.
            rent_type = ''
            rooms = ''
            area = ''
            direction = ''
            if len(item['info_list']) > 0:
                info_list = item['info_list']
                try:
                    # rent type (whole / shared)
                    rent_type = info_list[0].strip() if len(info_list[0]) > 0 else ''
                except Exception:
                    print(title, '未找到出租方式！')
                    rent_type = ''

                # room layout
                rooms = info_list[1] if len(info_list[1]) > 0 else ''

                # floor area, with the ㎡ unit stripped
                area = info_list[2].replace('㎡', '') if len(info_list[2]) > 0 else ''

                try:
                    # facing direction
                    direction = info_list[3].strip() if len(info_list[3]) > 0 else ''
                except Exception:
                    print(title, '暂无数据！')
                    direction = ''

            # 3. Address: join parts like ['中原区', '-', '五龙口', '-', '风和日丽家园']
            address = ''.join(item['address']) if len(item['address']) > 0 else ''

            # 4. Traffic description: join the fragments and drop '。'
            if len(item['traffic']) > 0:
                traffic = ''.join(item['traffic']).replace('。', '')
            else:
                traffic = ''

            # 5. Price
            price = item['price']

            # 6. Highlights
            liangdian = item['liangdian']

            # 7. Surrounding facilities
            peitao = item['peitao']

            # 8. Commuting info
            chuxing = item['chuxing']

            # 9. Furnishing/appliance list -> '-'-joined string, blanks dropped
            if len(item['sheshi']) > 0:
                sheshi = '-'.join(s for s in (i.strip() for i in item['sheshi']) if s)
            else:
                sheshi = ''

            # 10. Source URL
            url = item['url']

            # 11. Publish time -> unix timestamp.  The item field is spelled
            #     'pulish_time' (sic) in the spider's item definition.
            publish_time = item['pulish_time']
            time_info = publish_time.split(' ')[1]
            t_data = time.strptime(time_info, '%Y-%m-%d')
            publish_time = int(time.mktime(t_data))

            # 12. District column for easier querying: first address part,
            #     normalized to the canonical '<name>区' form, else '其他'.
            region = item['address'][0].replace('-', '')
            if region in ['朝阳', '海淀', '丰台', '东城', '西城', '石景山', '昌平', '大兴', '通州', '顺义', '房山', '密云', '门头沟', '怀柔', '延庆',
                          '平谷']:
                region = region + '区'
            if region not in ['朝阳区', '海淀区', '丰台区', '东城区', '西城区', '石景山区', '昌平区', '大兴区', '通州区', '顺义区', '房山区', '密云区',
                              '门头沟区', '怀柔区', '延庆区', '平谷区']:
                region = '其他'

            # 13. Fake view count
            liulanliang = random.randint(0, 5678)

            # 14. Source site
            source = 'fangtianxia'

            # 15. Block: address without its last '-'-separated component
            block = address.rsplit('-', 1)[0]

            # 16. Agent name
            people_name = item['people_name'][0] if item['people_name'] else ''

            # 17. Phone number
            phone_num = item['phone_num'] if item['phone_num'] else ''

            # 18. House id parsed from the URL.
            #     BUGFIX: default to '' so the format() call below does not
            #     hit a NameError when the URL contains no '_'.
            house_num = url.split('_')[1] if '_' in url else ''

            # Build the INSERT statement (see class NOTE about injection).
            sql = "insert into soufang (title, publish_time, region, rent_type, rooms, area, direction, address, traffic, price, liangdian, peitao, chuxing, sheshi, url, liulanliang, source, block, people_name, phone_num, house_num) values ('{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}')".format(
                title, publish_time, region, rent_type, rooms, area, direction, address, traffic, price, liangdian, peitao, chuxing, sheshi, url, liulanliang, source, block, people_name, phone_num, house_num)
            # self.data_to_db(sql)
            print("-" * 10)
            print('《{}》保存完毕！'.format(title))
            print("-" * 10)

        elif spider.name == 'anjuke':
            # anjuke items arrive already normalized; just map the fields.
            region = item['region']
            source = item['source']
            title = item['title']
            url = item['url']
            rooms = item['rooms']
            area = item['area']
            block = item['block']
            traffic = item['traffic']
            address = item['address']
            rent_type = item['rent_type']
            direction = item['direction']
            price = item['price']
            publish_time = item['publish_time']
            sheshi = item['house_setup']
            # fake view count
            liulanliang = random.randint(0, 5678)
            # columns this source has no data for
            liangdian = ''
            peitao = ''
            chuxing = ''

            sql = "insert into soufang (title, publish_time, region, rent_type, rooms, area, direction, address, traffic, price, liangdian, peitao, chuxing, sheshi, url, liulanliang, source, block) values ('{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}')".format(
                title, publish_time, region, rent_type, rooms, area, direction, address, traffic, price, liangdian,
                peitao, chuxing, sheshi, url, liulanliang, source, block)
            # self.data_to_db(sql)
            print("-" * 10)
            print('《{}》保存完毕！'.format(title))

        elif spider.name == '58tongcheng':
            # Title
            title = item['title']
            # Layout: last whitespace-separated token of the title
            rooms = item['title'].split(' ')[-1]
            # Area with the m² unit stripped
            area = item['area'].replace('m²', '') if len(item['area']) > 0 else ''
            # Price
            price = item['price']
            # Rent type: text before the first '|'
            rent_type = item['title'].split('|')[0].strip()
            # Traffic description
            traffic = item['traffic']
            # Map the URL slug to a district name.
            local = item['region']
            local_dict = {'chaoyang': '朝阳区', 'haidian': '海淀区', 'fengtai': '丰台区', "dongcheng": '东城区', "xicheng": '西城区',
                          'shijingshan': '石景山区', 'changping': '昌平区', 'daxing': '大兴区', 'tongzhouqu': '通州区',
                          'shunyi': '顺义区',
                          'fangshan': '房山区', 'miyun': '密云区',
                          'mentougou': '门头沟区', 'huairou': '怀柔区', 'yanqing': '延庆区', 'pingu': '平谷区'}
            # BUGFIX: fall back to '其他' instead of None for unknown slugs
            # (None crashed region[:-1] below and inserted 'None').
            region = local_dict.get(local, '其他')
            # Address: district prefix (without the trailing '区') plus the
            # double-space separated address parts.
            address_list = [region.rstrip('区')] + item['address'].split('  ')
            address = '-'.join(address_list)
            # Street-estate portion of the address.
            # BUGFIX: the short-address fallback previously produced a
            # *list*; fall back to the whole address string instead.
            addr_parts = address.split('-')
            block = '-'.join(addr_parts[1:]) if len(addr_parts) > 2 else address
            # Data source
            source = '58tongcheng'
            # Furnishing list -> '-'-joined string; normalize WIFI to 宽带
            if len(item['sheshi']) > 0:
                sheshi = '-'.join(s for s in (i.strip() for i in item['sheshi']) if s)
                sheshi = sheshi.replace('WIFI', '宽带')
            else:
                sheshi = ''
            # Publish time like 'N天前' -> unix timestamp of that date.
            # BUGFIX: default to 0 days ago so a value without '天' no
            # longer raises a NameError.
            publish_time = item['publish_time']
            days_ago = 0
            if '天' in publish_time:
                days_ago = int(re.findall(r'(\d+).*?', publish_time)[0])
            re_date = (datetime.datetime.now() - datetime.timedelta(days=days_ago)).strftime('%Y-%m-%d')
            publish_time = int(time.mktime(time.strptime(re_date, '%Y-%m-%d')))
            # Phone number
            phone_num = item['phone_num']
            # Source URL
            url = item['url']
            # Highlights
            liangdian = item['liangdian'].strip()
            # Fake view count
            liulanliang = random.randint(0, 5678)

            # BUGFIX: the original format string was missing the opening
            # quote on the publish_time and address placeholders ("{}'"
            # instead of "'{}'"), producing malformed SQL.
            sql = "insert into soufang (title, publish_time, region, rent_type, rooms, area, address, traffic, price, liangdian, sheshi, url, liulanliang, source, block) values ('{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}')".format(
                title, publish_time, region, rent_type, rooms, area, address, traffic, price, liangdian, sheshi, url, liulanliang, source, block)
            # self.data_to_db(sql)
            print("-" * 10)
            print('《{}》保存完毕！'.format(title))

        return item