from rest_framework import serializers
from .mixin import PlatformMappingMixin, TimestampFormatterMixin, StatusFieldsMixin, CrawlerTypeMappingMixin
from .models import CrawlerConfig, CrawlerCookiesAccount, CrawlerStatus, ProxyConfig
import json


class CrawlerStatusSerializer(serializers.ModelSerializer):
    """Read serializer for CrawlerStatus rows with human-readable timestamps."""

    start_time = serializers.SerializerMethodField()
    end_time = serializers.SerializerMethodField()

    class Meta:
        model = CrawlerStatus
        fields = [
            'crawler_status',
            'start_time',
            'end_time',
            'crawler_status_logs'
        ]

    @staticmethod
    def _format_datetime(value):
        """Render a datetime as 'YYYY-MM-DD HH:MM:SS'; None when unset."""
        return value.strftime('%Y-%m-%d %H:%M:%S') if value else None

    def get_start_time(self, obj):
        """Formatted crawl start time."""
        return self._format_datetime(obj.start_time)

    def get_end_time(self, obj):
        """Formatted crawl end time."""
        return self._format_datetime(obj.end_time)


class CrawlerConfigSerializer(TimestampFormatterMixin, StatusFieldsMixin, PlatformMappingMixin, CrawlerTypeMappingMixin, serializers.ModelSerializer):
    """Serializer for CrawlerConfig tasks.

    Translates between API-facing display values (platform / crawler-type
    names, via the mapping mixins) and the stored codes, and between the
    JSON-encoded text columns ``id_list`` / ``creator_list`` on the model
    and real Python lists in the API payload.
    """

    task_id = serializers.UUIDField(read_only=True, format='hex_verbose', required=False)
    create_time = serializers.SerializerMethodField()
    crawler_status = serializers.SerializerMethodField()
    start_time = serializers.SerializerMethodField()
    end_time = serializers.SerializerMethodField()
    crawler_status_logs = serializers.SerializerMethodField()
    # Exposed as lists of strings; persisted as JSON text (see
    # to_internal_value / to_representation).
    id_list = serializers.ListField(
        child=serializers.CharField(),
        required=False,
        allow_empty=True
    )
    creator_list = serializers.ListField(
        child=serializers.CharField(),
        required=False,
        allow_empty=True
    )

    class Meta:
        model = CrawlerConfig
        fields = [
            'task_id',
            'platform_name',
            'crawler_type',
            'keyword',
            'id_list',
            'creator_list',
            'remark',
            'is_crawler_comment',
            'is_crawler_sub_comment',
            'is_proxy_enabled',
            'create_time',
            'crawler_status',
            'start_time',
            'end_time',
            'crawler_status_logs'
        ]
        read_only_fields = ['task_id', 'create_time']

    def get_create_time(self, obj):
        """Creation timestamp formatted via TimestampFormatterMixin."""
        return self.format_timestamp(obj.create_time)

    def get_crawler_status(self, obj):
        """Current crawler status resolved via StatusFieldsMixin."""
        return self.get_status_field(obj, 'crawler_status')

    def get_start_time(self, obj):
        """Formatted crawl start time from the related status record."""
        return self.format_timestamp(self.get_status_field(obj, 'start_time'))

    def get_end_time(self, obj):
        """Formatted crawl end time from the related status record."""
        return self.format_timestamp(self.get_status_field(obj, 'end_time'))

    def get_crawler_status_logs(self, obj):
        """Status logs from the related status record."""
        return self.get_status_field(obj, 'crawler_status_logs')

    def get_platform_name_display(self, obj):
        """Display name for the platform (helper, not a serializer field)."""
        return self.get_platform_display(obj.platform_name)

    def get_crawler_type_display(self, obj):
        """Display name for the crawler type (helper, not a serializer field).

        BUG FIX: this previously called ``self.get_crawler_type_display``,
        i.e. itself, recursing forever on any invocation. Delegate to the
        mixin helper ``get_crawler_display`` instead, matching the usage in
        to_representation below.
        """
        return self.get_crawler_display(obj.crawler_type)

    def to_representation(self, instance):
        """Map stored codes to display names and decode the JSON list columns."""
        ret = super().to_representation(instance)
        ret['platform_name'] = self.get_platform_display(instance.platform_name)
        ret['crawler_type'] = self.get_crawler_display(instance.crawler_type)
        # Stored as JSON text; empty/NULL columns become empty lists.
        ret['id_list'] = json.loads(instance.id_list) if instance.id_list else []
        ret['creator_list'] = json.loads(instance.creator_list) if instance.creator_list else []
        return ret

    def to_internal_value(self, data):
        """Map the incoming display platform name to its code and re-encode
        the list fields as JSON text for storage."""
        data_copy = data.copy()
        if 'platform_name' in data_copy:
            data_copy['platform_name'] = self.get_platform_code(data_copy['platform_name'])

        ret = super().to_internal_value(data_copy)
        # Model columns hold JSON text, so dump the validated lists back.
        if 'id_list' in ret:
            ret['id_list'] = json.dumps(ret['id_list'])
        if 'creator_list' in ret:
            ret['creator_list'] = json.dumps(ret['creator_list'])
        return ret

    def validate_platform_name(self, value):
        """Validate the platform name and return its internal code."""
        platform_code = self.get_platform_code(value)
        if platform_code not in self.PLATFORM_MAPPING:
            raise serializers.ValidationError(
                f"平台名称必须是以下选项之一: {', '.join(self.PLATFORM_MAPPING.values())}"
            )
        return platform_code

    def validate(self, data):
        """Require at least one of keyword, id_list or creator_list.

        By this point to_internal_value has already JSON-encoded the list
        fields, so they are decoded again here before the emptiness check.
        """
        keyword = data.get('keyword', '')
        id_list = json.loads(data.get('id_list', '[]')) if data.get('id_list') else []
        creator_list = json.loads(data.get('creator_list', '[]')) if data.get('creator_list') else []

        if not any([keyword, id_list, creator_list]):
            raise serializers.ValidationError(
                "必须提供 'keyword'、'id_list' 或 'creator_list' 中的至少一个值"
            )

        return data


class CrawlerCookiesAccountSerializer(serializers.ModelSerializer):
    """Serializer for per-platform crawler cookie accounts."""

    # Owning user rendered via str(); never writable through the API.
    user = serializers.StringRelatedField(read_only=True)

    class Meta:
        model = CrawlerCookiesAccount
        fields = [
            'id',
            'account_name',
            'platform_name',
            'cookies',
            'create_time',
            'update_time',
            'invalid_timestamp',
            'status',
            'user'
        ]
        read_only_fields = ['id', 'create_time', 'update_time', 'invalid_timestamp', 'status', 'user']

    def validate_platform_name(self, value):
        """Reject platform names outside the supported set."""
        allowed_platforms = ['xhs', 'dy', 'ks', 'wb', 'bili', 'tieba', 'zhihu']
        if value in allowed_platforms:
            return value
        raise serializers.ValidationError(f"平台名称必须在 {allowed_platforms} 之内。")
    

class TaskListSerializer(serializers.ModelSerializer):
    """Compact serializer for listing crawler tasks."""

    class Meta:
        model = CrawlerConfig
        fields = [
            'task_id',
            'platform_name',
            'keyword',
            'create_time',
        ]


class ProxyConfigSerializer(serializers.ModelSerializer):
    """Serializer for proxy provider credentials."""

    # Owning user rendered via str(); never writable through the API.
    user = serializers.StringRelatedField(read_only=True)

    # NOTE(review): 'password' and 'secret_key' are both writable and echoed
    # back in read responses — confirm whether they should be write_only.
    class Meta:
        model = ProxyConfig
        fields = [
            'id',
            'provider',
            'username',
            'password',
            'secret_id',
            'secret_key',
            'create_time',
            'update_time',
            'status',
            'user'
        ]
        read_only_fields = ['id', 'create_time', 'update_time', 'status', 'user']
