from concurrent.futures import ThreadPoolExecutor
import time
import redis
import logging
import pymongo
from bson import ObjectId
from datetime import datetime



class IncreamentDatas:
    """Incrementally sync trade documents from the source MongoDB collection
    into a deduplicated target collection.

    Progress is checkpointed in Redis (key ``self.last_id_key``) as the last
    processed ``_id``, so interrupted runs can resume where they left off.

    NOTE(security): database hosts and credentials are hard-coded below;
    they should be moved to environment variables or a secrets store.
    """

    def __init__(self, host="47.96.157.204", port=6379, db=0):
        # Redis connection used to persist the incremental checkpoint.
        self.r = redis.Redis(host=host, port=port, db=db)
        self.mongo_client = pymongo.MongoClient(
            host='139.196.92.103',      # MongoDB server address
            port=27017,                 # MongoDB server port
            username='root',            # database user
            password='wby6225104',      # database password
            maxPoolSize=50,             # max pooled connections for concurrency
            socketTimeoutMS=30000,      # socket timeout (ms) to avoid long blocking
            compressors=['zlib'],       # compress wire traffic to save bandwidth
            zlibCompressionLevel=9,     # zlib level 1-9; 9 = highest compression

            # Other optional tuning parameters:
            # connectTimeoutMS=20000,   # connect timeout (ms)
            # maxIdleTimeMS=300000,     # max idle time of pooled connections
            # waitQueueTimeoutMS=10000, # pool wait-queue timeout
            # retryWrites=True,         # enable write retries
            # w='majority'              # write concern: ack from majority
        )
        self.mongo_col = self.mongo_client['jx3_pintuan']['jx3_trade_data']   # source collection
        self.target_col = self.mongo_client['jx3_pintuan']['jx3_all_data22']  # destination collection
        self.MAX_WORKERS = 4     # thread-pool size for chunk processing
        self.BATCH_SIZE = 40000  # documents fetched/processed per batch
        # Redis key holding the last processed _id (the resume checkpoint).
        self.last_id_key = 'jx3_all_data_last_id'

        # Ensure a unique index on the business key so concurrent upserts
        # deduplicate instead of inserting twins.
        try:
            self.target_col.create_index(
                [("itemBase.item.itemId", pymongo.ASCENDING)],
                unique=True,
                background=True  # build in background; don't block other ops
            )
        except Exception as e:
            logging.warning(f"创建唯一索引时出错（如果已存在可以忽略）: {str(e)}")

    def increments_update_items(self):
        """Main loop: fetch batches after the checkpoint, upsert each batch in
        parallel chunks, then advance the Redis checkpoint.

        Stops when a fetch returns no rows or a short (final) batch.
        Errors are logged, not re-raised (best-effort batch job).
        """
        try:
            # Resume from the last processed _id; None means start from scratch.
            last_id = self.get_redis_last_id()

            while True:
                # get_data_batch handles both the resume and first-run cases,
                # so the old if/else on last_id was redundant.
                items = self.get_data_batch(last_id)
                # No new data -> done.
                if not items:
                    break

                chunks = [items[i:i + self.BATCH_SIZE] for i in range(0, len(items), self.BATCH_SIZE)]

                processed_count = 0
                # Process chunks in parallel threads (I/O-bound bulk writes).
                with ThreadPoolExecutor(max_workers=self.MAX_WORKERS) as executor:
                    futures = [executor.submit(self._process_chunk, chunk) for chunk in chunks]
                    results = [future.result() for future in futures]

                    for result in results:
                        processed_count += result['processed']

                # Advance the checkpoint to the last _id of this batch.
                # BUGFIX: this was previously written under the literal key
                # 'last_id', while get_redis_last_id() reads self.last_id_key,
                # so runs never actually resumed. Use the configured key.
                last_id = items[-1]['_id']
                self.r.set(self.last_id_key, str(last_id))

                # A short batch means the source cursor is exhausted.
                if len(items) < self.BATCH_SIZE:
                    break

        except Exception as e:
            logging.error(f"增量更新失败: {str(e)}")

    def _process_chunk(self, items):
        """Upsert one chunk of source documents into the target collection.

        Uses $setOnInsert so existing documents (matched on
        itemBase.item.itemId) are left untouched.

        Returns a dict with key 'processed': count of inserted + matched docs
        (0 on total failure).
        """
        logging.info(f"处理数据块: {len(items)} 条")
        try:
            processed_count = 0
            current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

            # Accumulate one UpdateOne per item for a single bulk_write call.
            bulk_operations = []

            for item in items:
                item_id = item['attrs']['item_index']
                update_doc = {
                    "$setOnInsert": {
                        "itemBase": {
                            "icon": item['thumb'],
                            "item": {
                                "itemId": item_id,
                                "itemType": item['attrs']['item_type']
                            },
                            "typeName": item['attrs']['appearance_type_name'],
                            "info": item['info']
                        },
                        "createTime": current_time,
                        "updateTime": current_time,
                        "deleted": False
                    }
                }

                # update_one + upsert: insert if missing, no-op if present.
                bulk_operations.append(
                    pymongo.UpdateOne(
                        {"itemBase.item.itemId": item_id},
                        update_doc,
                        upsert=True
                    )
                )

            if bulk_operations:
                try:
                    # Execute the whole chunk in one round trip.
                    result = self.target_col.bulk_write(
                        bulk_operations,
                        ordered=False  # unordered: keep going past per-doc errors
                    )

                    inserted = result.upserted_count
                    matched = result.matched_count

                    logging.info(f"批量处理结果: 新插入 {inserted} 条, 已存在 {matched} 条")
                    processed_count = inserted + matched

                except pymongo.errors.BulkWriteError as bwe:
                    # Partial failure: count the operations that did succeed.
                    failed = len(bwe.details['writeErrors'])
                    successful_ops = len(bulk_operations) - failed
                    logging.warning(f"部分操作失败: 成功 {successful_ops} 条, 失败 {failed} 条")
                    processed_count = successful_ops

            return {
                "processed": processed_count
            }

        except Exception as e:
            logging.error(f"处理数据块失败: {str(e)}")
            # Consistent shape with the success path (callers only read
            # 'processed'; the stray 'last_id' key was dead weight).
            return {
                "processed": 0
            }

    def get_data_batch(self, last_id):
        """Fetch up to BATCH_SIZE source documents with _id > last_id.

        last_id=None means "from the beginning". Only the fields needed by
        _process_chunk are projected. Returns a (possibly empty) list.
        """
        query = {"_id": {"$gt": last_id}} if last_id else {}
        start_time = time.time()
        items = self.mongo_col.find(query, {
            "_id": 1,
            "thumb": 1,
            "attrs.item_index": 1,
            "attrs.item_type": 1,
            "attrs.appearance_type_name": 1,
            "attrs.appearance_point_rewards": 1,
            "info": 1,
            "single_unit_price": 1
        }).limit(self.BATCH_SIZE).batch_size(10000)
        end_time = time.time()
        logging.info(f"起始id是{last_id} 获取数据耗时: {end_time - start_time} 秒")

        return list(items)

    def get_redis_last_id(self):
        """Return the checkpointed ObjectId from Redis, or None on first run."""
        last_id = self.r.get(self.last_id_key)
        return ObjectId(last_id.decode("utf-8")) if last_id else None

    def close(self):
        """Close the MongoDB and Redis connections."""
        self.mongo_client.close()
        self.r.close()

if __name__ == "__main__":
    # Configure the root logger: without this, the logging.info progress
    # messages used throughout this script are silently dropped (the default
    # root level is WARNING).
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s %(levelname)s %(message)s'
    )
    increament_datas = IncreamentDatas()
    logging.info(f'开始增量更新,redis中last_id的key是{increament_datas.last_id_key},开始时间{datetime.now().strftime("%Y-%m-%d %H:%M:%S")}')
    try:
        increament_datas.increments_update_items()
        logging.info(f'增量更新完成,结束时间{datetime.now().strftime("%Y-%m-%d %H:%M:%S")}')
    finally:
        # Always release the MongoDB/Redis connections, even if the run fails.
        increament_datas.close()


