import json
import logging
import os
import pickle
import struct
from concurrent.futures import ThreadPoolExecutor
from typing import List, Optional

import numpy as np
import pybase64
import pymysql
import redis
from redis import ConnectionPool

from mysql import MYSQL_CONFIG

# Log to ~/app_2.log with timestamped INFO-level records.
logging.basicConfig(
    filename= os.path.expanduser('~')+'/app_2.log',
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)

# Shared Redis connection pool (db 2 on localhost), used by the module-level
# client `r` below.
REDIS_POOL = ConnectionPool(
    host='localhost',
    port=6379,
    db=2,
    max_connections=20,
    socket_timeout=0,      # read/write timeout (seconds); NOTE(review): redis-py passes this to sock.settimeout(), and 0 means non-blocking — confirm this wasn't meant to be None (block forever)
    socket_connect_timeout=5,  # connect timeout (seconds)
    retry_on_timeout=True,     # retry automatically after a timeout
    health_check_interval=30   # health-check interval (seconds)
)

# Module-level Redis client shared by every function in this file.
r = redis.Redis(connection_pool=REDIS_POOL)
def getTable(scheme:str,log_id:str) -> List[str]:
    """Return the feature rows for *scheme*, preferring the Redis cache.

    On a cache hit the value stored under key *scheme* is decoded from JSON;
    on a miss the rows are fetched fresh from MySQL via saveTable().
    *log_id* is only used to tag the log line on the cache-hit path.
    """
    cached = r.get(scheme)
    if not cached:
        # Cache miss: go straight to MySQL.
        return saveTable(scheme)
    logging.info(f"{log_id}___mysql使用redis数据")
    return json.loads(cached)

def saveTable(scheme) -> List[str]:
    """Fetch all (id, feature) rows for *scheme* from MySQL and return them.

    Queries `tp_{scheme}_feature` for rows with id > 1200000 and a non-empty
    feature column. Returns whatever cursor.fetchall() yields (in practice a
    tuple of row tuples, despite the List[str] annotation — kept for callers).

    Fix: the connection and cursor were previously never closed, leaking one
    MySQL connection per call; they are now released in a finally block.

    SECURITY NOTE(review): *scheme* is interpolated into the table name and
    cannot be bound as a parameter — ensure callers only pass trusted,
    validated scheme identifiers.
    """
    logging.info(f"sql:::1111111")
    conn = pymysql.connect(**MYSQL_CONFIG)
    try:
        cursor = conn.cursor()
        sql = f"SELECT id,feature FROM tp_{scheme}_feature where id>1200000 and feature!=''"
        logging.info(f"sql:::{sql}")
        cursor.execute(sql)
        mysql_results = cursor.fetchall()
        logging.info(f"查询数据总数：{len(mysql_results)}")
        return mysql_results
    finally:
        # Always release the connection, even if the query raises.
        conn.close()

def getFeature(feature_dst_list: List[str],scheme:str,log_id:str) -> List[np.ndarray]:
    """Return the decoded feature vectors for *scheme*, preferring Redis.

    On a cache hit the pickled list stored under key *scheme* is returned.
    On a miss the features are decoded from *feature_dst_list* on the CPU by
    saveFeature(), which persists them under "t_" + scheme; they are then
    read back so the caller still gets data.

    Bug fix: the cache-miss branch previously never assigned `dst_features`,
    so every miss raised UnboundLocalError at the return statement.

    NOTE(review): the lookup key (*scheme*) differs from the key saveFeature
    writes ("t_" + scheme), so this cache can never be hit by its own
    writes — confirm which key is intended.
    """
    feature_data = r.get(scheme)
    if feature_data:
        logging.info(f"{log_id}___使用redis的feature数据")
        # NOTE(review): pickle.loads on cached bytes — safe only if Redis
        # content is trusted.
        dst_features = pickle.loads(feature_data)
    else:
        logging.info(f"{log_id}___CPU解码")
        saveFeature(feature_dst_list,scheme)
        # Read back what saveFeature just stored so the miss path returns data.
        dst_features = pickle.loads(r.get("t_"+scheme))
    return dst_features

def saveFeature(feature_dst_list: List[str],scheme:str) -> List[np.ndarray]:
    """Decode every (id, feature) pair, cache the list in Redis, return it.

    Each element of *feature_dst_list* is indexed as row[1] for the base64
    feature string. The decoded arrays are pickled under "t_" + scheme and
    the "update_status" flag is set to 1 so isUpdate() reports a refresh.

    Fixes: decode_single_feature() returns None on failure (it never raises),
    so the previous code silently appended None entries to the cached list;
    failed decodes are now skipped and logged. The function also now returns
    the list, matching its annotation (previously returned None).
    """
    dst_features = []
    for i, feature_dst in enumerate(feature_dst_list):
        arr = decode_single_feature(feature_dst[1])
        if arr is None:
            # decode_single_feature already logged the underlying error.
            logging.warning(f"Failed to decode feature at index {i}: decode returned None")
        else:
            dst_features.append(arr)
    r.set("t_"+scheme, pickle.dumps(dst_features))
    r.set("update_status", 1)
    return dst_features

def decode_single_feature(feature: str) -> Optional[np.ndarray]:
    """Decode one base64 string of packed 32-bit floats into a 1-D array.

    The payload is interpreted as native-endian float32 values; the first 8
    floats are dropped (presumably a header/metadata prefix — TODO confirm)
    and the remainder is returned as a float64 ndarray.

    Returns None when the string cannot be decoded (bad base64 or a byte
    length that is not a whole number of floats); the annotation now reflects
    that instead of claiming an ndarray is always returned.
    """
    try:
        raw = pybase64.b64decode(feature)
        float_count = len(raw) // 4  # number of 32-bit floats in the payload
        # struct.error is raised (and caught) if len(raw) is not 4*float_count.
        return np.array(struct.unpack(f"{float_count}f", raw)[8:])
    except Exception as e:
        logging.warning(f"Failed to decode feature: {e}")
        return None

def decode_feature_batch(feature_dst_list: List[str]) -> List[np.ndarray]:
    """Decode all features concurrently, then flatten the decoded values.

    Bug fix: ThreadPoolExecutor was used here without ever being imported,
    so every call raised NameError; the import is now at the top of the file.
    Failed decodes (None results from decode_single_feature) are skipped
    instead of crashing the flatten with "NoneType is not iterable".

    NOTE(review): the final flatten concatenates the scalar elements of every
    decoded array into one flat list, which does not match the
    List[np.ndarray] annotation — confirm whether callers actually want the
    per-feature arrays (i.e. `return results`) instead.
    """
    # One worker per logical CPU core.
    cpu_count = os.cpu_count()
    with ThreadPoolExecutor(max_workers=cpu_count) as executor:
        results = list(executor.map(decode_single_feature, [x[1] for x in feature_dst_list]))
    # Merge results, skipping features that failed to decode.
    return [value for arr in results if arr is not None for value in arr]

def isUpdate()-> bool:
    """Check-and-clear the "update_status" flag in Redis.

    Returns True exactly when the flag exists and holds a positive integer,
    resetting it to 0 in that case; returns False otherwise.
    """
    status = r.get("update_status")
    logging.info(f"____更新状态：{status}")
    if status is None:
        # Flag was never set.
        return False
    if int(status) <= 0:
        return False
    # Positive flag: consume it so the next poll sees no pending update.
    r.set("update_status", 0)
    return True




if __name__ == "__main__":
    # Manual smoke test: fetch the "album" feature table from MySQL.
    saveTable("album")