# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import json
import pymysql
from itemadapter import ItemAdapter
from scrapy.exceptions import DropItem
from scrapy.utils.project import get_project_settings
from scrapy.exceptions import CloseSpider


class JsonWriterPipeline:
    """Stream every scraped item into xian_poi.json as one JSON array."""

    def open_spider(self, spider):
        # Open the output file and emit the array opener; items follow
        # one per line, comma-separated.
        self.file = open('xian_poi.json', 'w', encoding='utf-8')
        self.file.write("[\n")
        self.count = 0

    def close_spider(self, spider):
        # Terminate the JSON array, release the handle, and report totals.
        self.file.write("\n]")
        self.file.close()
        spider.logger.info(f"总共保存了 {self.count} 条数据到JSON文件")

    def process_item(self, item, spider):
        # Prefix a comma separator for every record after the first.
        separator = ",\n" if self.count > 0 else ""
        record = json.dumps(dict(item), ensure_ascii=False)
        self.file.write(separator + record)
        self.count += 1
        return item


class MySQLPipeline:
    """Persist POI items into MySQL and backfill a cleaned category on close.

    Connection parameters are read from the project settings
    (MYSQL_HOST / MYSQL_USER / MYSQL_PASSWORD / MYSQL_DB).
    """

    def __init__(self):
        self.settings = get_project_settings()
        self.conn = None
        self.cursor = None
        # uids successfully stored during this run, used for in-run dedup.
        self.processed_uids = set()

    def open_spider(self, spider):
        """Connect to MySQL and ensure the target table exists.

        Raises CloseSpider when the database is unreachable — without
        storage there is no point in continuing the crawl.
        """
        try:
            self.conn = pymysql.connect(
                host=self.settings.get('MYSQL_HOST'),
                user=self.settings.get('MYSQL_USER'),
                password=self.settings.get('MYSQL_PASSWORD'),
                db=self.settings.get('MYSQL_DB'),
                charset='utf8mb4',
                cursorclass=pymysql.cursors.DictCursor
            )
            self.cursor = self.conn.cursor()
            self.create_table()
        except Exception as e:
            spider.logger.error(f"MySQL连接失败: {e}")
            raise CloseSpider("MySQL连接失败")

    def create_table(self):
        """Create the xian_poi table (idempotent) with a unique uid key."""
        self.cursor.execute("""
        CREATE TABLE IF NOT EXISTS xian_poi (
            id INT AUTO_INCREMENT PRIMARY KEY,
            name VARCHAR(255),
            lng DECIMAL(10,6),
            lat DECIMAL(10,6),
            address TEXT,
            province VARCHAR(100),
            city VARCHAR(100),
            area VARCHAR(100),
            street_id VARCHAR(100),
            telephone VARCHAR(100),
            detail INT,
            uid VARCHAR(100),
            tag TEXT,
            type VARCHAR(100),
            overall_rating FLOAT,
            comment_num INT,
            query_category VARCHAR(50),  -- query classification the item came from
            poi_category VARCHAR(50),    -- cleaned category, filled in close_spider
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            UNIQUE KEY (uid)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
        """)
        self.conn.commit()

    def process_item(self, item, spider):
        """Insert one POI row; drop in-run duplicates keyed on uid.

        Raises DropItem for duplicates and for failed inserts.
        """
        uid = item['uid']
        if uid in self.processed_uids:
            raise DropItem(f"重复数据: {uid}")

        try:
            self.cursor.execute("""
            INSERT INTO xian_poi (
                name, lng, lat, address, province, city, area, 
                street_id, telephone, detail, uid, tag, type, 
                overall_rating, comment_num, query_category
            ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            ON DUPLICATE KEY UPDATE
                name=VALUES(name),
                address=VALUES(address),
                telephone=VALUES(telephone)
            """, (
                item['name'], item['lng'], item['lat'], item['address'],
                item['province'], item['city'], item['area'],
                item['street_id'], item['telephone'], item['detail'],
                uid, item['tag'], item['type'],
                item.get('overall_rating'), item.get('comment_num'),
                item['query_category']
            ))
            self.conn.commit()
        except Exception as e:
            spider.logger.error(f"MySQL插入失败: {e}")
            self.conn.rollback()
            raise DropItem(f"MySQL插入失败: {e}")

        # Record the uid only after a successful commit so that a retried
        # item is not misclassified as a duplicate after a failed insert.
        self.processed_uids.add(uid)
        return item

    def close_spider(self, spider):
        """Backfill poi_category from the tag column, then release DB resources.

        The cursor and connection are always closed, even if the
        classification UPDATE fails.
        """
        if not self.conn:
            return
        try:
            self.cursor.execute("""
            UPDATE xian_poi 
            SET poi_category = CASE
                WHEN tag LIKE '%景点%' OR tag LIKE '%风景区%' OR tag LIKE '%旅游区%' THEN '景点'
                WHEN tag LIKE '%博物馆%' OR tag LIKE '%遗址%' OR tag LIKE '%古迹%' THEN '文物古迹'
                WHEN tag LIKE '%公园%' OR tag LIKE '%植物园%' THEN '公园'
                WHEN tag LIKE '%游乐园%' OR tag LIKE '%水上乐园%' THEN '游乐园'
                WHEN tag LIKE '%寺庙%' OR tag LIKE '%教堂%' THEN '寺庙'
                WHEN tag LIKE '%动物园%' OR tag LIKE '%水族馆%' THEN '动物园'
                ELSE query_category
            END
            """)
            self.conn.commit()
        except Exception as e:
            spider.logger.error(f"POI分类更新失败: {e}")
            self.conn.rollback()
        finally:
            # Close cursor and connection even when the UPDATE fails,
            # so shutdown never leaks the DB connection.
            if self.cursor:
                self.cursor.close()
            self.conn.close()
