# -*- coding: utf-8 -*-
# @Author: hunan
# @Date: 2024/12/3 17:13
# @Description:
import json
import logging
import os
import socket

import redis

from product_upload.util.basic.common_util import sleep_random_duration

logger = logging.getLogger(__name__)

# Old Redis connection settings. Known local/dev hostnames talk to a Redis on
# loopback; every other host uses the LAN instance.
OLD_REDIS_HOST = '127.0.0.1' if socket.gethostname() in ['admin_pro', 'dev3070', 'hunan'] else '192.168.1.2'
OLD_REDIS_PORT = 6379
OLD_REDIS_DB = 0
# NOTE(review): hardcoded credential committed in source — consider moving to
# an environment variable or config file.
OLD_REDIS_PASSWORD = "hunan2024rmdQLKJ"

# Key namespaces for Amazon US rule data in Redis:
# xlsm prefix holds per-product-type rule blobs ("<type>:<rule>"),
# common prefix holds shared cross-type blobs ("node", "prompt", ...).
OLD_AMAZON_US_XLSM_KEY_PREFIX = 'amazon_us_rule:xlsm:'
OLD_AMAZON_US_COMMON_KEY_PREFIX = 'amazon_us_rule:common:'

# Global cache for Amazon US data
# 'common': shared blobs keyed by name; 'product_types': per-type rule dicts.
OLD_AMAZON_US_CACHE = {
    'common': {},
    'product_types': {}
}

# Module-level client created at import time (no connection is opened until
# first command); decode_responses=True makes get() return str, not bytes.
old_redis_client = redis.Redis(
    host=OLD_REDIS_HOST,
    port=OLD_REDIS_PORT,
    db=OLD_REDIS_DB,
    password=OLD_REDIS_PASSWORD,
    decode_responses=True
)


def old_load_json_from_redis(prefix, key):
    """Load and deserialize a JSON value from Redis.

    Args:
        prefix (str): Key namespace prefix (e.g. OLD_AMAZON_US_COMMON_KEY_PREFIX).
        key (str): Key suffix appended to the prefix.

    Returns:
        The parsed JSON object, or None when the key is absent or all
        retry attempts failed.
    """
    max_attempts = 3
    for attempt in range(1, max_attempts + 1):
        try:
            data = old_redis_client.get(f"{prefix}{key}")
            if data:
                return json.loads(data)
            # BUG FIX: a missing key is not an error and must not retry.
            # The original loop never incremented `attempt` on this path,
            # so an absent key spun forever.
            return None
        except Exception as e:
            # Transient Redis/decoding errors are retried; only log once we
            # are close to giving up, matching the original behavior.
            if attempt >= 2:
                logger.error(f"Error loading JSON from Redis: {e}")
            sleep_random_duration(0.1, 0.2)
    return None


def old_save_json_to_redis(prefix, key, data):
    """Serialize *data* to JSON and store it in Redis under prefix+key.

    Returns:
        bool: True on success, False when serialization or the write failed
        (the error is logged, never raised).
    """
    try:
        payload = json.dumps(data)
        old_redis_client.set(f"{prefix}{key}", payload)
    except Exception as e:
        logger.error(f"Error saving JSON to Redis: {e}")
        return False
    return True


def old_load_json_from_file(filepath):
    """Read and parse a JSON document from *filepath*.

    Returns:
        The parsed object, or None when the file does not exist or cannot
        be read/parsed (the error is logged, never raised).
    """
    try:
        if not os.path.exists(filepath):
            return None
        with open(filepath, 'r', encoding='utf-8') as f:
            return json.load(f)
    except Exception as e:
        logger.error(f"Error loading JSON from file {filepath}: {e}")
        return None


def old_load_amazon_us_data_to_cache():
    """Populate the module-level OLD_AMAZON_US_CACHE from Redis.

    Fills the 'common' section first, then per-product-type rule data for
    every type listed under 'all_product_type_list'. Missing keys are
    silently skipped.
    """
    common_keys = ["not_ai_field", "prompt", "sensitive", "class_type_map", "node", "all_product_type_list"]
    rule_keys = ["data_definition", "drop_list", "field_map", "required", "template", "valid_values"]

    # Shared (non product-specific) blobs.
    for common_key in common_keys:
        value = old_load_json_from_redis(OLD_AMAZON_US_COMMON_KEY_PREFIX, common_key)
        if value:
            OLD_AMAZON_US_CACHE['common'][common_key] = value

    # Product-type specific rule blobs, driven by the master type list.
    for product_type in OLD_AMAZON_US_CACHE['common'].get('all_product_type_list', []):
        type_rules = {}
        OLD_AMAZON_US_CACHE['product_types'][product_type] = type_rules
        for rule_key in rule_keys:
            value = old_load_json_from_redis(OLD_AMAZON_US_XLSM_KEY_PREFIX, f"{product_type}:{rule_key}")
            if not value:
                continue
            # Some entries were double JSON-encoded by the initializer;
            # decode the inner layer when the payload is still a string.
            if isinstance(value, str):
                try:
                    value = json.loads(value)
                except Exception:
                    pass
            type_rules[rule_key.replace("_list", "")] = value

    logger.info("Old Amazon US data loaded into cache successfully")


def old_initialize_redis_from_files():
    """Load all rule JSON files from disk into Redis, then warm the cache.

    Reads from ~/Desktop/amazon_rule/json: the master
    all_product_type_list.json, one directory of rule files per product
    type, and the shared common files. Finally refreshes the in-memory
    cache via old_load_amazon_us_data_to_cache().
    """
    user_directory = os.path.expanduser("~")
    base_json_path = os.path.join(user_directory, "Desktop", "amazon_rule", "json")

    # The master list of product types drives everything else.
    all_product_type_list = old_load_json_from_file(os.path.join(base_json_path, "all_product_type_list.json"))
    old_save_json_to_redis(OLD_AMAZON_US_COMMON_KEY_PREFIX, "all_product_type_list", all_product_type_list)

    # Per-product-type rule files. The repeated path/key/save triples of the
    # original are collapsed into a loop over the rule names.
    rule_names = ["data_definition", "drop_list", "field_map", "required", "template", "valid_values"]
    # BUG FIX: guard against a missing/unreadable all_product_type_list.json,
    # which previously made this for-loop raise TypeError on None.
    for type_file in (all_product_type_list or []):
        for rule_name in rule_names:
            rule_path = os.path.join(base_json_path, type_file, f"{rule_name}.json")
            old_save_json_to_redis(OLD_AMAZON_US_XLSM_KEY_PREFIX, f"{type_file}:{rule_name}",
                                   old_load_json_from_file(rule_path))

    # Shared (non product-specific) files.
    common_files = ["not_ai_field", "prompt", "sensitive", "class_type_map", "node"]
    for file_name in common_files:
        filepath = os.path.join(base_json_path, f"{file_name}.json")
        old_save_json_to_redis(OLD_AMAZON_US_COMMON_KEY_PREFIX, file_name, old_load_json_from_file(filepath))
    logger.info("Old Redis initialization complete")

    # Load data into global cache
    old_load_amazon_us_data_to_cache()


# Filter the field list, then extract the sub-dict for the surviving keys.
def filter_dict_by_keys(input_dict, field_list, filter_field_list):
    """Return the entries of input_dict whose keys are in field_list but
    not in filter_field_list."""
    surviving = set(field_list).difference(filter_field_list)
    return {k: v for k, v in input_dict.items() if k in surviving}


# Resolve a raw amazon key to the canonical configured product_type.
def old_get_real_product_type(amazon_key: str):
    """Map *amazon_key* (in any '-'/'_'/case variant) to the canonical
    product type name.

    Checks the in-memory cache first, then falls back to the list stored
    in Redis. Returns "" when nothing matches.
    """
    if not amazon_key:
        return ""

    def _normalize(value):
        # Comparison ignores dashes, underscores and case.
        return value.replace("-", "").replace("_", "").lower()

    target = _normalize(amazon_key)

    # Pass 1: the cached master list, if loaded.
    for candidate in OLD_AMAZON_US_CACHE['common'].get('all_product_type_list', []):
        if _normalize(candidate) == target:
            return candidate

    # Pass 2: fall back to Redis.
    fallback_list = old_load_json_from_redis(OLD_AMAZON_US_COMMON_KEY_PREFIX, "all_product_type_list")
    for candidate in fallback_list or []:
        if _normalize(candidate) == target:
            return candidate
    return ""


def old_get_product_data(product_type):
    """
    Get all data (both XLSM and common) for a specific product type

    Args:
        product_type (str): Product type to get data for

    Returns:
        dict: Combined dictionary with keys 'product_type', 'product_data'
        and 'common_data', or None when product_type is empty/unknown.
    """
    if not product_type:
        return None

    # Normalize product type if it's not in the exact format
    real_product_type = old_get_real_product_type(product_type)
    if not real_product_type:
        return None

    result = {
        'product_type': real_product_type,
        'product_data': {},
        'common_data': {}
    }

    def _decode_twice(raw):
        # Some values were double JSON-encoded by the initializer; decode
        # the inner layer when the payload is still a string.
        if isinstance(raw, str):
            try:
                return json.loads(raw)
            except ValueError:
                # FIX: narrowed from a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit. JSONDecodeError is a
                # ValueError subclass.
                pass
        return raw

    # Try to get product data from cache first
    if real_product_type in OLD_AMAZON_US_CACHE['product_types']:
        result['product_data'] = OLD_AMAZON_US_CACHE['product_types'][real_product_type].copy()
    else:
        # Fallback: Try to get product data from Redis
        rule_keys = ["data_definition", "drop_list", "field_map", "required", "template", "valid_values"]
        for rule_key in rule_keys:
            data = old_load_json_from_redis(OLD_AMAZON_US_XLSM_KEY_PREFIX, f"{real_product_type}:{rule_key}")
            if data:
                result['product_data'][rule_key.replace("_list", "")] = _decode_twice(data)

    # Add common data
    if OLD_AMAZON_US_CACHE['common']:
        result['common_data'] = OLD_AMAZON_US_CACHE['common'].copy()
    else:
        # Fallback: Try to get common data from Redis
        common_keys = ["not_ai_field", "prompt", "sensitive", "class_type_map", "node", "all_product_type_list"]
        for key in common_keys:
            data = old_load_json_from_redis(OLD_AMAZON_US_COMMON_KEY_PREFIX, key)
            if data:
                result['common_data'][key] = data
    return result


# Return the combined rule map for a product_type (legacy-shaped dict).
def old_load_rule_map_by_product_type(amazon_key: str):
    """
    Legacy function that returns rule map for a product type.
    Now uses get_product_data internally for consistency.
    """
    if not amazon_key:
        return None

    product_data = old_get_product_data(amazon_key)
    if product_data is None:
        return None

    # Start from the product-specific rules, then merge in the subset of
    # common data the old format carried.
    rule_map = dict(product_data['product_data'])
    common_data = product_data['common_data']
    for common_key in ("node", "not_ai_field", "prompt", "sensitive", "class_type_map"):
        if common_key in common_data:
            rule_map[common_key] = common_data[common_key]

    rule_map["product_type"] = product_data['product_type']
    return rule_map


def old_get_xlsm_data_for_product(product_type):
    """
    Get only XLSM-specific data for a product type

    Args:
        product_type (str): Product type to get XLSM data for

    Returns:
        dict: Product-specific XLSM data, or None for an unknown type
    """
    combined = old_get_product_data(product_type)
    return combined['product_data'] if combined else None


def old_get_common_data():
    """
    Get only common data that applies to all product types

    Returns:
        dict: Common data for all products (a copy of the cache, or data
        fetched from Redis when the cache is empty)
    """
    cached = OLD_AMAZON_US_CACHE['common']
    if cached:
        # Copy so callers cannot mutate the shared cache.
        return cached.copy()

    # Cache is cold: pull each common blob straight from Redis.
    redis_common = {}
    for common_key in ("not_ai_field", "prompt", "sensitive", "class_type_map", "node", "all_product_type_list"):
        value = old_load_json_from_redis(OLD_AMAZON_US_COMMON_KEY_PREFIX, common_key)
        if value:
            redis_common[common_key] = value
    return redis_common


def old_ensure_data_loaded():
    """
    Ensure that all data is loaded into cache
    Call this function at the start of your application to preload all data
    """
    def _cache_ready():
        # Both sections must be non-empty for the cache to count as warm.
        return bool(OLD_AMAZON_US_CACHE['common']) and bool(OLD_AMAZON_US_CACHE['product_types'])

    if _cache_ready():
        return
    # First attempt: hydrate the cache from Redis.
    old_load_amazon_us_data_to_cache()
    # Last resort: rebuild Redis (and the cache) from the JSON files on disk.
    if not _cache_ready():
        old_initialize_redis_from_files()


def old_get_template_file_path(product_type):
    """Return the path of the .xlsm template for *product_type* under
    ~/Desktop/amazon_rule/origin (the type is resolved to its canonical name)."""
    base_excel_path = os.path.join(os.path.expanduser("~"), "Desktop", "amazon_rule", "origin")
    real_type = old_get_real_product_type(product_type)
    return os.path.join(base_excel_path, f"{real_type}.xlsm")


def old_get_class_name(amazon_key: str):
    """Return the class name mapped to *amazon_key* via class_type_map,
    or "" when the key or mapping is unknown."""
    if not amazon_key:
        return ""
    # Normalize, then resolve to the canonical product type.
    normalized_key = amazon_key.replace("-", "").replace("_", "").lower()
    real_product_type = old_get_real_product_type(normalized_key)
    if not real_product_type:
        return ""
    # Prefer the cached mapping; fall back to Redis otherwise.
    cached_map = OLD_AMAZON_US_CACHE['common'].get('class_type_map')
    if cached_map is not None:
        return cached_map.get(real_product_type, "")
    class_map = old_load_json_from_redis(OLD_AMAZON_US_COMMON_KEY_PREFIX, "class_type_map")
    return class_map.get(real_product_type, "") if class_map else ""


def old_get_last_catalog_name(catalog_id):
    """Return the catalog name for *catalog_id* from the node mapping.

    Returns "N/A" when the id is unmapped, "" when the id is None or no
    node data is available at all.
    """
    if catalog_id is None:
        return ""

    # Prefer the cached node mapping.
    cached_nodes = OLD_AMAZON_US_CACHE['common'].get('node')
    if cached_nodes is not None:
        return cached_nodes.get(catalog_id, "N/A")

    # Fallback to Redis if not in cache
    node_data = old_load_json_from_redis(OLD_AMAZON_US_COMMON_KEY_PREFIX, "node")
    if node_data:
        return node_data.get(catalog_id, "N/A")
    return ""


# Library module: no standalone behavior when executed directly.
if __name__ == '__main__':
    pass
