import time
import logging
from app.utils.redis_utils import redis_client, RedisUtils, RedisKeys
from app.extensions import db
from app.models.video import Video

# Configure root logging for this worker process.
_LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(level=logging.INFO, format=_LOG_FORMAT)

# Named logger used throughout this module.
logger = logging.getLogger('async_view_count_updater')

class AsyncViewCountUpdater:
    """
    Asynchronous view-count updater.

    Pops video IDs from a Redis list and batch-flushes their view counts
    into the database via ``RedisUtils.sync_view_count_to_db``.
    """

    @staticmethod
    def process_queue(batch_size=50, sleep_time=10):
        """
        Continuously drain the view-count update queue.

        Runs forever: each iteration pops up to ``batch_size`` video IDs
        from the Redis queue, syncs their view counts to the database,
        then sleeps ``sleep_time`` seconds. Any error is logged (with
        traceback) and the loop retries after the same sleep interval.

        Args:
            batch_size: maximum number of video IDs processed per batch.
            sleep_time: seconds to sleep between batches (and after errors).
        """
        redis_utils = RedisUtils()

        while True:
            try:
                # Pop up to batch_size IDs from the tail of the queue.
                # NOTE(review): code uses RPOP (the original comment said
                # LPOP) — assumes producers LPUSH; verify against producer.
                video_ids = []
                for _ in range(batch_size):
                    raw_id = redis_client.rpop(RedisKeys.VIEW_COUNT_UPDATE_QUEUE)
                    if raw_id is None:
                        # Queue is drained for this batch.
                        break
                    # redis-py returns bytes unless the client was created
                    # with decode_responses=True; accept either form.
                    if isinstance(raw_id, bytes):
                        raw_id = raw_id.decode('utf-8')
                    video_ids.append(raw_id)

                if video_ids:
                    # Lazy %-args: no string formatting when INFO is disabled.
                    logger.info("Processing %d video view counts", len(video_ids))
                    # Flush the batch of view counts to the database.
                    redis_utils.sync_view_count_to_db(video_ids)
                    logger.info("Successfully updated %d video view counts", len(video_ids))

                # Wait before processing the next batch.
                time.sleep(sleep_time)

            except Exception:
                # Broad catch is deliberate: this is a long-running worker
                # that must survive transient Redis/DB failures.
                # logger.exception preserves the traceback (logger.error
                # with str(e) would drop it).
                logger.exception("Error processing view count queue")
                # Back off before retrying.
                time.sleep(sleep_time)

if __name__ == "__main__":
    # When executed directly as a script, start draining the queue
    # with the default batch size and sleep interval.
    logger.info("Starting async view count updater")
    AsyncViewCountUpdater.process_queue()
