#!/usr/bin/env python3
"""
演示如何向FileUpload表中插入记录的示例脚本
包括设置新增的三种分析状态字段
"""
import os
import sys
import datetime

# 添加项目根目录到Python路径
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

# 导入项目的数据库和模型
from app.core.database import SessionLocal, engine
from app.models.upload import FileUpload, Base

def create_demo_record():
    """Create one demo FileUpload row and insert it into the database.

    Ensures the table exists, inserts a single record with all three
    analysis-status fields set to "未分析", and prints the persisted values.

    Returns:
        The refreshed FileUpload instance on success, or None if the
        insert failed (the transaction is rolled back and the error printed).
    """
    print("开始插入示例记录...")

    # Make sure the table exists before inserting.
    Base.metadata.create_all(bind=engine)

    # Open a database session for this unit of work.
    db = SessionLocal()

    try:
        # Build the new row; upload_time is filled in automatically by the
        # model's column default.
        new_upload = FileUpload(
            filename="demo_2024_10_19_120000.csv",
            original_filename="演示文件.csv",
            file_path="/uploads/data/demo_2024_10_19_120000.csv",
            file_size=102400,  # 100 KB
            file_type="text/csv",
            status="success",
            # The three analysis statuses accept: 未分析 / 分析中 / 已分析
            departure_retention_status="未分析",
            background_activity_status="未分析",
            anomaly_detection_status="未分析"
        )

        # Stage the object and flush it to the database.
        db.add(new_upload)
        db.commit()

        # Refresh to pick up the auto-generated ID and server-set timestamps.
        db.refresh(new_upload)

        print("\n示例记录插入成功！")
        print(f"记录ID: {new_upload.id}")
        print(f"原始文件名: {new_upload.original_filename}")
        print(f"上传时间: {new_upload.upload_time}")
        print(f"离场/停留识别状态: {new_upload.departure_retention_status}")
        print(f"背景/活动区分状态: {new_upload.background_activity_status}")
        print(f"异常检测状态: {new_upload.anomaly_detection_status}")

        return new_upload

    except Exception as e:
        # Roll back the failed transaction and report; callers check for None.
        db.rollback()
        print(f"\n插入记录失败: {e}")
        return None
    finally:
        # Always release the session, success or failure.
        db.close()

def batch_insert_example():
    """Demonstrate inserting several FileUpload rows in a single commit.

    Builds three records whose analysis-status fields cycle through the
    three possible states, then persists them with add_all().
    """
    print("\n演示批量插入记录...")

    session = SessionLocal()
    try:
        # Index-based rotation through the three analysis states.
        states = ("未分析", "分析中", "已分析")

        records = []
        for idx in range(3):
            state = states[idx % 3]
            records.append(
                FileUpload(
                    filename=f"batch_demo_{idx}.csv",
                    original_filename=f"批量文件{idx}.csv",
                    file_path=f"/uploads/data/batch_demo_{idx}.csv",
                    file_size=51200 + idx * 1024,
                    file_type="text/csv",
                    status="success",
                    departure_retention_status=state,
                    background_activity_status=state,
                    anomaly_detection_status=state,
                )
            )

        # Stage every record and commit them in one transaction.
        session.add_all(records)
        session.commit()

        print(f"批量插入成功，共 {len(records)} 条记录")

    except Exception as e:
        session.rollback()
        print(f"批量插入失败: {e}")
    finally:
        session.close()

def update_status_example(record_id):
    """Demonstrate updating the three analysis statuses of one record.

    Looks up the FileUpload row by primary key and, if found, marks all
    three analysis-status fields as "已分析" and commits the change.
    """
    print(f"\n演示更新记录ID {record_id} 的状态...")

    session = SessionLocal()
    try:
        # Fetch the target row by primary key.
        target = session.query(FileUpload).filter(FileUpload.id == record_id).first()

        if target is None:
            print(f"未找到ID为 {record_id} 的记录")
        else:
            # Flip every analysis status to the completed state and persist.
            target.departure_retention_status = "已分析"
            target.background_activity_status = "已分析"
            target.anomaly_detection_status = "已分析"

            session.commit()
            print("状态更新成功！")

    except Exception as e:
        session.rollback()
        print(f"更新失败: {e}")
    finally:
        session.close()

def main():
    """Run the full demo: single insert, batch insert, then status update."""
    banner = "=" * 60

    print(banner)
    print("FileUpload表插入示例")
    print(banner)

    # Insert one demo record; returns None on failure.
    created = create_demo_record()

    # Show the batch-insert path regardless of the single insert's outcome.
    batch_insert_example()

    # Only demonstrate the status update when the first insert succeeded.
    if created is not None:
        update_status_example(created.id)

    print("\n" + banner)
    print("演示完成！您可以运行 query_uploads.py 查看所有记录")
    print(banner)

# Run the demo only when executed as a script, not when imported.
if __name__ == "__main__":
    main()