import json
import logging
import os
import time
from ast import Dict  # NOTE(review): ast.Dict is an AST node class, not a type alias; shadowed by typing.Dict below
from datetime import datetime
from typing import Dict

import mysql.connector
from mysql.connector import Error

# 配置日志
logger = logging.getLogger('database')

def is_first_chunk_index(chunk_index):
    """Return True when *chunk_index* denotes the first chunk.

    Accepts several formats: ``A001``, ``A1``, ``001``, ``1``, and the bare
    keyword ``FIRST`` (case-insensitive). Empty/None values are never first.
    """
    if not chunk_index:
        return False

    import re

    digit_runs = re.findall(r'\d+', chunk_index)
    if digit_runs:
        # A leading numeric value of 1 (or 0) marks the first chunk.
        return int(digit_runs[0]) <= 1

    # No digits at all: fall back to well-known first-chunk labels.
    return chunk_index.upper() in ('A001', 'A1', '001', '1', 'FIRST')

def is_consecutive_chunk_index(prev_chunk, current_chunk):
    """Return True when the numeric parts of two chunk indexes differ by exactly 1."""
    import re

    prev_digits = re.findall(r'\d+', prev_chunk)
    curr_digits = re.findall(r'\d+', current_chunk)

    # Without a numeric part on both sides there is nothing to compare.
    if not (prev_digits and curr_digits):
        logger.warning(f"无法从chunk_index提取数字: prev={prev_chunk}, current={current_chunk}")
        return False

    prev_num = int(prev_digits[0])
    current_num = int(curr_digits[0])
    if current_num - prev_num == 1:
        return True

    logger.warning(f"chunk_index不连续: {prev_chunk}({prev_num}) -> {current_chunk}({current_num})")
    return False

def is_time_continuous(prev_end_time, current_start_time, tolerance_seconds=1):
    """Return True when two chunks are contiguous in time.

    Both arguments are ``YYYYMMDDHHMMSS`` strings; the current chunk may start
    between 0 and *tolerance_seconds* seconds after the previous one ended.
    Unparseable timestamps count as not continuous.
    """
    fmt = '%Y%m%d%H%M%S'
    try:
        gap = (datetime.strptime(current_start_time, fmt)
               - datetime.strptime(prev_end_time, fmt)).total_seconds()
    except ValueError as e:
        logger.error(f"时间格式解析错误: {e}")
        return False

    if 0 <= gap <= tolerance_seconds:
        return True

    logger.warning(
        f"时间不连续: 上一个结束={prev_end_time}, 当前开始={current_start_time}, "
        f"时间差={gap:.0f}秒"
    )
    return False

def normalize_time_value(value):
    """Coerce assorted time representations into a YYYYMMDDHHMMSS string.

    Handles None, datetime objects, digit strings (14- or 12-digit), a few
    common datetime string formats, and stringifies anything else before
    retrying. Unrecognised strings are returned unchanged.
    """
    if value is None:
        return None

    if isinstance(value, datetime):
        return value.strftime('%Y%m%d%H%M%S')

    if not isinstance(value, str):
        # Any other type: stringify and run through the same rules once more.
        return normalize_time_value(str(value))

    s = value.strip()
    if not s:
        return None

    if s.isdigit():
        if len(s) == 14:
            return s          # already canonical
        if len(s) == 12:
            return '20' + s   # two-digit year: assume 20xx

    # Try the common human-readable layouts.
    for fmt in ('%Y-%m-%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%dT%H:%M:%S'):
        try:
            return datetime.strptime(s, fmt).strftime('%Y%m%d%H%M%S')
        except ValueError:
            continue

    logger.debug(f"无法规范化时间字符串: {s}, 保持原样")
    return s

# Database configuration.
# SECURITY NOTE(review): credentials were hard-coded here; they now serve only
# as fallback defaults — supply DB_HOST/DB_USER/DB_PASSWORD/DB_NAME via the
# environment in production and rotate the password that was committed.
db_config = {
    'host': os.environ.get('DB_HOST', '36.111.80.114'),
    'user': os.environ.get('DB_USER', 'inspection'),
    'password': os.environ.get('DB_PASSWORD', '5dz8PWyy7NDGyJRa'),
    'database': os.environ.get('DB_NAME', 'inspection')
}
def find_previous_in_time_file(device_no, current_chunk_index, current_file_name):
    """
    Recursively walk backwards through consecutive chunk files until the file
    that still holds the session's ``inTime`` entry is found.

    Validation rules applied at every step:
    1. chunk_index values must be consecutive (numeric parts differ by 1)
    2. times must be contiguous (previous chunk's end time equals this chunk's
       start time, with a 0 or 1 second gap allowed)
    3. if either condition fails, stop searching and return None

    :param device_no: device serial number
    :param current_chunk_index: chunk index of the current file
    :param current_file_name: current file name (used to look up its start time)
    :return: list of contiguous file rows (ordered oldest to newest) or None
    """
    # The first chunk has nothing before it, so there is nothing to look up.
    if is_first_chunk_index(current_chunk_index):
        logger.info(f"第一个分片 {current_chunk_index}，无需查找上一个文件")
        return None
    
    connection = None
    files_to_merge = []
    
    try:
        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor(dictionary=True)
        
        # Fetch the current file's start_time from the database.
        query_current = """
        SELECT start_time 
        FROM qa_sound_record 
        WHERE device_no = %s AND file_name = %s
        ORDER BY id DESC LIMIT 1
        """
        cursor.execute(query_current, (device_no, current_file_name))
        current_record = cursor.fetchone()
        
        if not current_record or not current_record.get('start_time'):
            logger.error(f"无法从数据库获取当前文件的start_time: {current_file_name}")
            return None
        
        current_start_time = current_record['start_time']
        current_start_time = normalize_time_value(current_start_time)
        if not current_start_time:
            logger.error(f"无法解析当前文件的start_time: {current_file_name}")
            return None
        search_chunk = current_chunk_index
        # NOTE(review): search_file_name is assigned here and below but never
        # read — candidate for removal.
        search_file_name = current_file_name
        search_start_time = current_start_time
        max_iterations = 20  # hard cap to protect against an endless walk on bad data
        iteration = 0
        
        logger.info(f"开始递归查找连续分片，起点: {current_chunk_index}, 开始时间: {current_start_time}")
        
        while iteration < max_iterations:
            # Stop once the walk has reached the first chunk.
            if is_first_chunk_index(search_chunk):
                logger.info(f"已到达第一个分片 {search_chunk}，停止查找")
                break
            
            # Find the previous chunk: the row with the latest start_time
            # strictly before the current search position.
            query = """
            SELECT id, file_name, extended, chunk_index, start_time, end_time, file_path
            FROM qa_sound_record 
            WHERE device_no = %s AND start_time < %s
            ORDER BY start_time DESC LIMIT 1
            """
            cursor.execute(query, (device_no, search_start_time))
            prev_file = cursor.fetchone()
            
            if not prev_file:
                logger.warning(f"未找到 {search_chunk} 的上一个分片，停止查找")
                break
            
            # Check 1: chunk_index continuity.
            if not is_consecutive_chunk_index(prev_file['chunk_index'], search_chunk):
                logger.warning(
                    f"chunk_index不连续，丢弃该段会话。已查找文件数: {len(files_to_merge)}"
                )
                return None  # sequence gap -> abandon this session
            
            # Check 2: time continuity (uses the end_time column from the DB).
            prev_end_time = prev_file['end_time']
            prev_end_time = normalize_time_value(prev_end_time)
            if not prev_end_time:
                logger.error(f"上一个文件缺少或无法解析end_time字段: {prev_file['file_name']}")
                return None

            # NOTE(review): search_start_time was already normalized (above, or
            # at the end of the previous iteration); this repeat is redundant
            # but harmless.
            search_start_time = normalize_time_value(search_start_time)
            if not search_start_time:
                logger.error(f"无法解析当前分片的开始时间: {current_file_name}")
                return None
            
            if not is_time_continuous(prev_end_time, search_start_time):
                logger.warning(
                    f"时间不连续，丢弃该段会话。已查找文件数: {len(files_to_merge)}"
                )
                return None  # time gap -> abandon this session
            
            # Inspect this file's last extended element (the format keeps
            # inTime and outTime in separate entries).
            try:
                extended = json.loads(prev_file['extended'])
                if not extended or not isinstance(extended, list):
                    # Empty extended: the whole file belongs to the session,
                    # keep walking backwards.
                    logger.info(f"{prev_file['chunk_index']} 的extended为空，继续向上查找")
                    files_to_merge.insert(0, prev_file)
                    # fall through to the next iteration
                else:
                    # Only the last element decides how to proceed.
                    last_item = extended[-1]
                    # NOTE(review): if last_item is not a dict, none of the
                    # branches below run and the file is skipped without being
                    # collected — confirm that is intended.
                    if isinstance(last_item, dict):
                        has_in_time = last_item.get('inTime') is not None
                        has_out_time = last_item.get('outTime') is not None
                        
                        # inTime only: the session opened here and spans files.
                        if has_in_time and not has_out_time:
                            files_to_merge.insert(0, prev_file)
                            logger.info(
                                f"找到会话起点文件: {prev_file['chunk_index']}，"
                                f"最后一个元素只有inTime，总共需要合并 {len(files_to_merge)} 个文件"
                            )
                            break
                        # outTime only: every session closed here, so there is
                        # a gap before the current chunk.
                        elif not has_in_time and has_out_time:
                            logger.warning(
                                f"{prev_file['chunk_index']} 最后一个元素只有outTime，"
                                f"说明所有会话完整，中间有断档，丢弃该段会话"
                            )
                            return None
                        # Both inTime and outTime (legacy format): the session
                        # is complete, so there is a gap as well.
                        elif has_in_time and has_out_time:
                            logger.warning(
                                f"{prev_file['chunk_index']} 最后一个元素同时有inTime和outTime（旧格式），"
                                f"说明会话完整，中间有断档，丢弃该段会话"
                            )
                            return None
                        # Neither key present (unexpected): keep walking.
                        else:
                            logger.warning(
                                f"{prev_file['chunk_index']} 最后一个元素为空，继续向上查找"
                            )
                            files_to_merge.insert(0, prev_file)
                            # fall through to the next iteration
                        
            except (json.JSONDecodeError, KeyError, TypeError) as e:
                logger.error(f"解析extended失败: {prev_file['chunk_index']}, 错误: {e}")
                return None
            
            # Step the walk one file further back (DB start_time column).
            search_chunk = prev_file['chunk_index']
            search_file_name = prev_file['file_name']
            search_start_time = normalize_time_value(prev_file['start_time'])
            if not search_start_time:
                logger.error(f"文件缺少或无法解析start_time字段: {prev_file['file_name']}")
                return None
            iteration += 1
        
        if iteration >= max_iterations:
            logger.error(f"查找文件超过最大迭代次数 {max_iterations}，可能存在数据异常")
            return None
        
        if not files_to_merge:
            logger.warning("未找到任何需要合并的文件")
            return None
        
        logger.info(f"成功查找到 {len(files_to_merge)} 个连续的分片文件")
        return files_to_merge
        
    except Error as e:
        logger.error(f"查找连续分片文件错误: {str(e)}", exc_info=True)
        return None
    except Exception as e:
        logger.error(f"查找连续分片文件时发生意外错误: {str(e)}", exc_info=True)
        return None
    finally:
        # NOTE(review): if connection.cursor() itself raised, `cursor` would be
        # unbound here; the connection check only guards the common case.
        if connection and connection.is_connected():
            cursor.close()
            connection.close()

def save_to_database(device_no, file_name, chunk_index, start_time, end_time, usr_no, has_body, extended, file_path, file_size, tenant_id='000000', create_dept=None, create_by=None, status='0'):
    """
    Insert one chunk-file row into qa_sound_record.

    :param device_no: device serial number
    :param file_name: chunk file name
    :param chunk_index: chunk index within the recording
    :param start_time: chunk start time
    :param end_time: chunk end time
    :return: the inserted row id on success, or None on failure
    """
    connection = None
    cursor = None  # keep defined so the finally block is safe if connect() fails
    try:
        logger.debug(f"连接数据库: {db_config['host']}/{db_config['database']}")
        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor()

        # create_time is set server-side; del_flag is always '0' (not deleted).
        query = """
        INSERT INTO qa_sound_record 
        (tenant_id, device_no, file_name, chunk_index, start_time, end_time, usr_no, has_body, extended, file_path, file_size, create_dept, create_by, create_time, status, del_flag)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), %s, '0')
        """
        data = (tenant_id, device_no, file_name, chunk_index, start_time, end_time, usr_no, has_body, extended, file_path, file_size, create_dept, create_by, status)

        logger.debug(f"执行SQL: {query}")
        logger.debug(f"SQL参数: {data}")

        cursor.execute(query, data)
        connection.commit()
        logger.info(f"文件信息已保存到数据库: {file_name} (分片 {chunk_index})")
        return cursor.lastrowid

    except Error as e:
        logger.error(f"数据库操作错误: {str(e)}", exc_info=True)
        return None
    except Exception as e:
        logger.error(f"保存到数据库时发生意外错误: {str(e)}", exc_info=True)
        return None
    finally:
        if connection and connection.is_connected():
            if cursor is not None:
                try:
                    cursor.close()
                except Exception:
                    pass
            connection.close()
            logger.debug("数据库连接已关闭")

def update_sound_record_status(device_no: str, file_name: str, chunk_index: str, status: str) -> bool:
    """
    Update the status column of one qa_sound_record row.

    :param device_no: device serial number
    :param file_name: chunk file name
    :param chunk_index: chunk index identifying the row
    :param status: new status value
    :return: True when at least one row was updated, False otherwise
    """
    connection = None
    cursor = None  # keep defined so the finally block is safe if connect() fails
    try:
        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor()
        query = """
        UPDATE qa_sound_record
        SET status = %s, update_time = NOW()
        WHERE device_no = %s AND file_name = %s AND chunk_index = %s
        """
        cursor.execute(query, (status, device_no, file_name, chunk_index))
        connection.commit()
        return cursor.rowcount > 0
    except Error as e:
        logger.error(f"更新qa_sound_record状态失败: {str(e)}", exc_info=True)
        return False
    except Exception as e:
        # Consistent with the other helpers in this module: never propagate,
        # log and report failure instead.
        logger.error(f"更新qa_sound_record状态时发生意外错误: {str(e)}", exc_info=True)
        return False
    finally:
        if connection and connection.is_connected():
            if cursor is not None:
                try:
                    cursor.close()
                except Exception:
                    pass
            connection.close()

def save_session_to_database(session_no, device_no, original_files, session_file, in_time, out_time, is_cross_file=False, conversation_text: str | None = None, session_desc: str | None = None, key_events: str | None = None) -> bool:
    """
    Persist one session row into qa_session_info.

    Kept backward compatible with the legacy call signature; arguments map
    onto the table columns as follows:
    - start_time/end_time: parsed from in_time/out_time (YYYYMMDDHHMMSS)
    - duration: end - start in seconds (sessions shorter than 60s are skipped)
    - source_files: original_files
    - device_id: device_no
    - org_id: looked up from qa_sound_device (0 when unknown)
    - session_date: the date part of start_time
    - tenant_id fixed to '000000'; status fixed to '0'
    - is_cross_file is accepted for compatibility but currently unused
    Optional scoring columns are left empty.

    :return: True when a row was inserted, False otherwise.
    """
    connection = None
    cursor = None  # keep defined so the finally block is safe if connect() fails
    try:
        # An empty conversation text means there is nothing worth saving.
        text_str = ""
        if conversation_text is not None:
            text_str = conversation_text if isinstance(conversation_text, str) else str(conversation_text)
            text_str = text_str.strip()
        if not text_str:
            logger.info(f"会话文本为空，跳过保存: session_no={session_no}")
            return False

        # Parse a YYYYMMDDHHMMSS string into a datetime; None on failure.
        def parse_ts(ts_str):
            try:
                return datetime.strptime(ts_str, '%Y%m%d%H%M%S') if ts_str else None
            except Exception:
                return None

        start_dt = parse_ts(in_time)
        end_dt = parse_ts(out_time)

        if not start_dt or not end_dt or end_dt <= start_dt:
            logger.warning(f"无效的会话时间范围: in={in_time}, out={out_time}")
            return False

        duration = int((end_dt - start_dt).total_seconds())
        if duration < 60:
            logger.info(f"会话时长不足60秒（{duration}秒），跳过保存: session_no={session_no}")
            return False

        session_date = start_dt.date().isoformat()

        # Resolve the device's org_id from qa_sound_device (0 when unknown).
        device_info = get_device_info(device_no)
        org_id = device_info.get('org_id', 0) if device_info else 0

        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor()

        query = """
        INSERT INTO qa_session_info (
            session_no,
            start_time,
            end_time,
            duration,
            key_events,
            session_desc,
            conversation_text,
            org_id,
            device_id,
            session_date,
            source_files,
            session_file,
            tenant_id,
            create_dept,
            create_by,
            create_time,
            update_by,
            update_time,
            status,
            del_flag
        ) VALUES (
            %s, %s, %s, %s,
            %s, %s, %s,
            %s, %s,
            %s,
            %s,
            %s,
            %s,
            %s, %s, NOW(),
            %s, NOW(),
            %s,
            %s
        )
        """

        data = (
            session_no,
            start_dt.strftime('%Y-%m-%d %H:%M:%S'),
            end_dt.strftime('%Y-%m-%d %H:%M:%S'),
            duration,
            key_events,         # formatted events string supplied by the caller
            session_desc,       # free-text description of the session's origin
            conversation_text,  # full transcript
            org_id,
            device_no,
            session_date,
            original_files,
            session_file,
            '000000',  # tenant_id
            None,      # create_dept
            None,      # create_by
            None,      # update_by
            '0',       # status: normal
            '0'        # del_flag: not deleted
        )

        cursor.execute(query, data)
        connection.commit()
        logger.info(f"会话信息已保存到 qa_session_info: {session_no}")
        return True

    except Error as e:
        logger.error(f"会话数据库操作错误: {str(e)}", exc_info=True)
        return False
    except Exception as e:
        logger.error(f"保存会话到数据库时发生意外错误: {str(e)}", exc_info=True)
        return False
    finally:
        if connection and connection.is_connected():
            if cursor is not None:
                try:
                    cursor.close()
                except Exception:
                    pass
            connection.close()

def get_session_by_device(device_no, start_time=None, end_time=None):
    """
    Fetch sessions for a device, optionally restricted to a time window.

    :param device_no: device serial number
    :param start_time: optional lower bound (inclusive) on session start_time
    :param end_time: optional upper bound (inclusive) on session start_time
    :return: list of session rows, oldest first; [] on error
    """
    connection = None
    try:
        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor(dictionary=True)

        # The new table carries no device_no column, so match the device
        # number inside the source_files list instead.
        sql = "SELECT * FROM qa_session_info WHERE source_files LIKE %s"
        args = [f"%{device_no}%"]

        if start_time and end_time:
            sql += " AND start_time BETWEEN %s AND %s"
            args += [start_time, end_time]
        elif start_time:
            sql += " AND start_time >= %s"
            args.append(start_time)
        elif end_time:
            sql += " AND start_time <= %s"
            args.append(end_time)

        cursor.execute(sql + " ORDER BY start_time ASC", args)
        return cursor.fetchall()

    except Error as e:
        logger.error(f"查询会话信息错误: {str(e)}", exc_info=True)
        return []
    except Exception as e:
        logger.error(f"查询会话时发生意外错误: {str(e)}", exc_info=True)
        return []
    finally:
        if connection and connection.is_connected():
            cursor.close()
            connection.close()

def get_session_by_file(original_file):
    """
    Find every session whose source_files list mentions *original_file*.

    :param original_file: original chunk file name
    :return: list of session rows, oldest first; [] on error
    """
    connection = None
    try:
        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor(dictionary=True)

        # LIKE match: the file name may appear anywhere inside source_files.
        sql = """
        SELECT * FROM qa_session_info
        WHERE source_files LIKE %s
        ORDER BY start_time ASC
        """
        cursor.execute(sql, [f"%{original_file}%"])
        return cursor.fetchall()
    except Error as e:
        logger.error(f"根据文件查询会话错误: {str(e)}", exc_info=True)
        return []
    except Exception as e:
        logger.error(f"根据文件查询会话发生意外错误: {str(e)}", exc_info=True)
        return []
    finally:
        if connection and connection.is_connected():
            try:
                cursor.close()
            except Exception:
                pass
            connection.close()

def update_conversation_text_by_session_no(session_no: str, conversation_text: str) -> bool:
    """Set conversation_text on the qa_session_info row matching *session_no*."""
    connection = None
    try:
        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor()

        sql = """
        UPDATE qa_session_info
        SET conversation_text = %s, update_time = NOW()
        WHERE session_no = %s
        """
        cursor.execute(sql, (conversation_text, session_no))
        connection.commit()

        affected = cursor.rowcount
        logger.info(f"更新会话conversation_text完成，session_no: {session_no}, 影响行数: {affected}")
        return affected > 0
    except Error as e:
        logger.error(f"更新conversation_text时数据库错误: {str(e)}", exc_info=True)
        return False
    except Exception as e:
        logger.error(f"更新conversation_text时发生意外错误: {str(e)}", exc_info=True)
        return False
    finally:
        if connection and connection.is_connected():
            try:
                cursor.close()
            except Exception:
                pass
            connection.close()

def save_session_score(session_no: str, score_data: dict):
    """
    Persist one session's scoring payload into qa_session_score.

    :param session_no: session identifier the score belongs to
    :param score_data: scoring-model output; expected keys include "scores",
        "rationales", "final_score", "resolved", "business_tags", "evidence",
        "guidance_hits", "events" and "business_guidance_details"

    Errors are logged and swallowed; nothing is returned.
    """
    connection = None
    cursor = None  # keep defined so the finally block is safe if connect() fails
    try:
        # Map the model's resolution verdict onto the numeric status column.
        resolved_map = {"yes": 2, "partial": 1, "no": 0}
        resolved_status = resolved_map.get(score_data.get("resolved"), 0)

        # Flatten events into "event_type | count" entries joined by commas.
        # NOTE(review): a falsy count (e.g. 0) drops the event — confirm intended.
        processed_events = []
        for event in score_data.get("events", []):
            event_type = event.get("event_type", "")
            count = event.get("count", "")
            if event_type and count:
                processed_events.append(f"{event_type} | {count}")
        events_str = ", ".join(processed_events) if processed_events else ""

        # Column order matches the INSERT statement below.
        data = (
            session_no,
            json.dumps(score_data.get("business_tags"), ensure_ascii=False),
            score_data["scores"].get("业务处理能力"),
            score_data["rationales"].get("业务处理能力"),
            score_data["scores"].get("服务态度"),
            score_data["rationales"].get("服务态度"),
            score_data["scores"].get("沟通能力"),
            score_data["rationales"].get("沟通能力"),
            score_data["scores"].get("问题是否解决"),
            score_data["rationales"].get("问题是否解决"),
            score_data["scores"].get("业务引导"),
            score_data["rationales"].get("业务引导"),
            score_data.get("final_score"),
            resolved_status,
            json.dumps(score_data.get("evidence", []), ensure_ascii=False),
            json.dumps(score_data.get("guidance_hits", []), ensure_ascii=False),
            events_str,
            json.dumps(score_data.get("business_guidance_details", {}), ensure_ascii=False)
        )

        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor()
        query = """
        INSERT INTO qa_session_score (
            session_no, business_tags, business_ability, business_ability_rationale,
            service_attitude, service_attitude_rationale, communication_ability,
            communication_ability_rationale, problem_resolved, problem_resolved_rationale,
            business_guidance, business_guidance_rationale, final_score, resolved_status,
            evidence, guidance_hits, events, business_guidance_details
        ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """
        cursor.execute(query, data)
        connection.commit()
        logger.info(f"会话评分已保存: {session_no}")

    except Error as e:
        logger.error(f"评分数据库操作错误: {str(e)}", exc_info=True)
    except Exception as e:
        logger.error(f"保存会话评分失败: {str(e)}", exc_info=True)
    finally:
        if connection and connection.is_connected():
            if cursor is not None:
                try:
                    cursor.close()
                except Exception:
                    pass
            connection.close()

def update_device_status_on_upload(device_no: str, device_name: str = None, device_type: str = '1', org_id: int = 0) -> bool:
    """
    Mark a device as uploading in qa_sound_device when audio arrives,
    inserting the device row first when it does not exist yet.

    Args:
        device_no: device serial number
        device_name: optional display name used when the row is created
        device_type: device type ('1' desk terminal, '2' badge, '3' tag)
        org_id: organisation id

    Returns:
        bool: True when a row was updated or inserted.
    """
    connection = None
    try:
        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor()

        # Is there already a live (non-deleted) row for this device?
        cursor.execute(
            "SELECT id FROM qa_sound_device WHERE device_no = %s AND del_flag = '0'",
            (device_no,),
        )
        existing = cursor.fetchone()

        if existing:
            # Known device: flip status to uploading ('1') and refresh times.
            sql = """
            UPDATE qa_sound_device 
            SET device_status = '1', 
                device_status_time = NOW(), 
                update_time = NOW()
            WHERE device_no = %s AND del_flag = '0'
            """
            cursor.execute(sql, (device_no,))
            connection.commit()

            if cursor.rowcount > 0:
                logger.info(f"设备 {device_no} 状态已更新为上传中")
                return True
            logger.warning(f"设备 {device_no} 状态更新失败，未影响任何行")
            return False

        # Unknown device: create it, defaulting the name when none was given.
        if not device_name:
            device_name = f"设备_{device_no}"

        sql = """
            INSERT INTO qa_sound_device 
            (device_no, device_name, device_type, org_id, device_status, device_status_time, 
             tenant_id, create_time, update_time, status, del_flag)
            VALUES (%s, %s, %s, %s, '1', NOW(), '000000', NOW(), NOW(), '0', '0')
            """
        cursor.execute(sql, (device_no, device_name, device_type, org_id))
        connection.commit()

        if cursor.rowcount > 0:
            logger.info(f"设备 {device_no} 不存在，已插入新记录，状态为上传中")
            return True
        logger.warning(f"设备 {device_no} 插入失败")
        return False

    except Error as e:
        logger.error(f"更新设备 {device_no} 状态时数据库错误: {str(e)}")
        return False
    except Exception as e:
        logger.error(f"更新设备 {device_no} 状态时发生意外错误: {str(e)}")
        return False
    finally:
        if connection and connection.is_connected():
            cursor.close()
            connection.close()

def get_device_info(device_no: str) -> dict | None:
    """
    Fetch a device row from qa_sound_device.

    Args:
        device_no: device serial number

    Returns:
        The device row as a dict, or None when the device does not exist or
        the lookup fails.
    """
    connection = None
    cursor = None  # keep defined so the finally block is safe if connect() fails
    try:
        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor(dictionary=True)

        query = """
        SELECT id, device_no, device_name, device_type, org_id, device_status, 
               device_status_time, tenant_id, create_time, update_time, status
        FROM qa_sound_device 
        WHERE device_no = %s AND del_flag = '0'
        """
        cursor.execute(query, (device_no,))
        result = cursor.fetchone()

        # dictionary=True cursors already yield dicts; copy defensively.
        return dict(result) if result else None

    except Error as e:
        logger.error(f"查询设备 {device_no} 信息时数据库错误: {str(e)}")
        return None
    except Exception as e:
        logger.error(f"查询设备 {device_no} 信息时发生意外错误: {str(e)}")
        return None
    finally:
        if connection and connection.is_connected():
            if cursor is not None:
                cursor.close()
            connection.close()

def update_device_status_to_offline(device_no: str) -> bool:
    """
    Flip a device's qa_sound_device status to not-uploading ('2').

    Args:
        device_no: device serial number

    Returns:
        bool: True when at least one row was updated.
    """
    connection = None
    try:
        connection = mysql.connector.connect(**db_config)
        cursor = connection.cursor()

        sql = """
        UPDATE qa_sound_device 
        SET device_status = '2', 
            device_status_time = NOW(), 
            update_time = NOW()
        WHERE device_no = %s AND del_flag = '0'
        """
        cursor.execute(sql, (device_no,))
        connection.commit()

        touched = cursor.rowcount > 0
        if touched:
            logger.info(f"设备 {device_no} 状态已更新为未上传")
        else:
            logger.warning(f"设备 {device_no} 状态更新失败，未影响任何行")
        return touched

    except Error as e:
        logger.error(f"更新设备 {device_no} 状态为未上传时数据库错误: {str(e)}")
        return False
    except Exception as e:
        logger.error(f"更新设备 {device_no} 状态为未上传时发生意外错误: {str(e)}")
        return False
    finally:
        if connection and connection.is_connected():
            cursor.close()
            connection.close()