"""
景点数据综合优化命令
1. 去重和合并重复景点
2. 丰富各类别景点数据
3. 确保图片数据正确
"""
from django.core.management.base import BaseCommand
from django.db.models import Q
from apps.attractions.models import Attraction
from decimal import Decimal
import re


class Command(BaseCommand):
    """Comprehensive attraction-data optimization command.

    Runs up to three steps:
      1. Deduplicate/merge attractions with similar names that are
         geographically close (< 1 km apart).
      2. Print instructions for the separate category-enrichment command.
      3. Repair image-related fields on attractions.

    Supports ``--dry-run`` (preview only), ``--deduplicate-only`` and
    ``--enrich-only`` to limit which steps run.
    """
    help = '综合优化景点数据：去重、丰富类别、修复图片'

    def add_arguments(self, parser):
        """Register command-line flags."""
        parser.add_argument(
            '--dry-run',
            action='store_true',
            help='仅显示将要执行的操作，不实际执行'
        )
        parser.add_argument(
            '--deduplicate-only',
            action='store_true',
            help='仅执行去重操作'
        )
        parser.add_argument(
            '--enrich-only',
            action='store_true',
            help='仅执行丰富数据操作'
        )

    def handle(self, *args, **options):
        """Entry point: dispatch the requested optimization steps."""
        dry_run = options['dry_run']
        deduplicate_only = options['deduplicate_only']
        enrich_only = options['enrich_only']

        self.stdout.write(self.style.SUCCESS('=' * 60))
        self.stdout.write(self.style.SUCCESS('开始景点数据综合优化'))
        self.stdout.write(self.style.SUCCESS('=' * 60))
        self.stdout.write(f'模式: {"预览模式（不实际执行）" if dry_run else "执行模式"}')
        self.stdout.write('')

        if not enrich_only:
            # Step 1: deduplicate and merge similar attractions.
            self.stdout.write(self.style.WARNING('步骤1: 景点去重和合并'))
            self.deduplicate_attractions(dry_run)

        if not deduplicate_only:
            # Step 2: enrichment is delegated to a separate management command;
            # we only print the instructions here.
            # BUG FIX: Django's style palette has no ``INFO`` attribute —
            # ``self.style.INFO`` raised AttributeError. ``NOTICE`` is the
            # closest built-in informational style.
            self.stdout.write(self.style.WARNING('\n步骤2: 丰富各类别景点数据'))
            self.stdout.write(self.style.NOTICE('请运行以下命令来丰富各类别数据：'))
            self.stdout.write(self.style.NOTICE('  python manage.py enrich_attractions_by_category --category all --max-count 50'))

        # Step 3: repair image-related data.
        self.stdout.write(self.style.WARNING('\n步骤3: 修复图片数据'))
        self.fix_image_data(dry_run)

        self.stdout.write(self.style.SUCCESS('\n' + '=' * 60))
        self.stdout.write(self.style.SUCCESS('优化完成！'))
        self.stdout.write(self.style.SUCCESS('=' * 60))

    def calculate_distance(self, lat1, lon1, lat2, lon2):
        """Return the great-circle distance between two points in km.

        Uses the haversine formula with a mean Earth radius of 6371 km.
        Arguments may be Decimal/str/float; they are coerced via ``float``
        (``None`` is not accepted — callers must pre-check coordinates).
        """
        from math import radians, cos, sin, asin, sqrt

        lat1, lon1, lat2, lon2 = map(radians, [float(lat1), float(lon1), float(lat2), float(lon2)])
        dlat = lat2 - lat1
        dlon = lon2 - lon1
        a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
        c = 2 * asin(sqrt(a))
        r = 6371  # mean Earth radius in kilometers
        return c * r

    def name_similarity(self, name1, name2):
        """Return a similarity score in [0, 1] for two attraction names.

        Names are normalized by stripping a ``-``/``_`` suffix and any
        parenthesized text (ASCII or full-width). Exact match scores 1.0,
        substring containment 0.8; otherwise the Jaccard similarity of the
        two character sets is returned.
        """
        def normalize(name):
            name = name.strip()
            name = re.sub(r'[-_].*$', '', name)       # drop "-branch"/"_alias" suffixes
            name = re.sub(r'\(.*?\)', '', name)        # drop ASCII parentheticals
            name = re.sub(r'（.*?）', '', name)         # drop full-width parentheticals
            return name.strip()

        norm1 = normalize(name1)
        norm2 = normalize(name2)

        if norm1 == norm2:
            return 1.0
        if norm1 in norm2 or norm2 in norm1:
            return 0.8

        set1 = set(norm1)
        set2 = set(norm2)
        if len(set1) == 0 or len(set2) == 0:
            return 0.0

        # Jaccard similarity over the character sets.
        intersection = len(set1 & set2)
        union = len(set1 | set2)
        return intersection / union if union > 0 else 0.0

    def deduplicate_attractions(self, dry_run):
        """Detect and merge duplicate attractions.

        Two attractions are duplicates when their name similarity is >= 0.7
        and they lie less than 1 km apart. Within a duplicate group the
        "best" record (highest rating, popularity, then richest data) is
        kept and the others are merged into it via ``migrate_data``.

        Args:
            dry_run: when True, only report what would be merged.
        """
        # Materialize once: the O(n^2) pairwise scan below repeatedly slices
        # the collection, so work on a plain list rather than a queryset.
        attractions = list(Attraction.objects.all().order_by('id'))
        duplicates = []
        processed_ids = set()

        for i, attr1 in enumerate(attractions):
            if attr1.id in processed_ids:
                continue

            similar_attrs = []
            for attr2 in attractions[i+1:]:
                if attr2.id in processed_ids:
                    continue

                similarity = self.name_similarity(attr1.name, attr2.name)
                if similarity >= 0.7:
                    # BUG FIX: skip pairs with missing coordinates —
                    # calculate_distance would raise TypeError on float(None).
                    if None in (attr1.latitude, attr1.longitude,
                                attr2.latitude, attr2.longitude):
                        continue
                    distance = self.calculate_distance(
                        attr1.latitude, attr1.longitude,
                        attr2.latitude, attr2.longitude
                    )
                    if distance < 1.0:
                        similar_attrs.append((attr2, similarity, distance))

            if similar_attrs:
                # Pick the record with the best rating/popularity and the
                # most complete data as the one to keep.
                candidates = [attr1] + [attr[0] for attr in similar_attrs]
                best = max(candidates, key=lambda a: (
                    a.rating or 0,
                    a.popularity or 0,
                    len(a.description or ''),
                    len(a.address or ''),
                    len(a.amap_image_urls or [])
                ))

                to_merge = [a for a in candidates if a.id != best.id]
                if to_merge:
                    duplicates.append((best, to_merge))
                    processed_ids.update([a.id for a in to_merge])
                    # The keeper is also marked processed so it cannot later
                    # be absorbed into another group.
                    processed_ids.add(best.id)

        merged_count = 0
        for keep, merge_list in duplicates:
            self.stdout.write(f'\n保留: {keep.name} (ID: {keep.id})')
            for attr in merge_list:
                self.stdout.write(f'  合并: {attr.name} (ID: {attr.id})')
                if not dry_run:
                    self.migrate_data(keep, attr)
                    attr.delete()
                merged_count += 1

        self.stdout.write(self.style.SUCCESS(f'\n共合并 {merged_count} 个重复景点'))

    def migrate_data(self, keep, merge):
        """Move related data from ``merge`` into ``keep`` before deletion.

        Re-parents reviews, favorites (skipping users who already favorited
        the keeper) and images, then recomputes the keeper's rating from its
        reviews, takes the max popularity, and fills in missing description,
        address, AMap POI id and image URLs from the merged record.
        """
        # Re-parent reviews.
        for review in merge.reviews.all():
            review.attraction = keep
            review.save()

        # Re-parent favorites; drop duplicates per user.
        for favorite in merge.favorited_by.all():
            if not keep.favorited_by.filter(user=favorite.user).exists():
                favorite.attraction = keep
                favorite.save()
            else:
                favorite.delete()

        # Re-parent images.
        for image in merge.images.all():
            image.attraction = keep
            image.save()

        # Recompute rating from all reviews now attached to the keeper.
        from django.db.models import Avg
        avg_rating = keep.reviews.aggregate(avg=Avg('rating'))['avg']
        if avg_rating:
            keep.rating = round(avg_rating, 2)

        # Keep the higher popularity of the two records.
        keep.popularity = max(keep.popularity or 0, merge.popularity or 0)

        # Prefer the longer description; fill in a missing address.
        if not keep.description or len(keep.description) < len(merge.description or ''):
            keep.description = merge.description
        if not keep.address and merge.address:
            keep.address = merge.address

        # Fill in missing AMap POI id.
        if not keep.amap_poi_id and merge.amap_poi_id:
            keep.amap_poi_id = merge.amap_poi_id

        # Union the AMap image URL lists (order is not preserved).
        if merge.amap_image_urls:
            existing_images = set(keep.amap_image_urls or [])
            new_images = set(merge.amap_image_urls)
            keep.amap_image_urls = list(existing_images | new_images)

        keep.save()

    def fix_image_data(self, dry_run):
        """Repair image-related fields on all attractions.

        Resets ``amap_image_urls`` to an empty list when it is not a list,
        and flags attractions that have AMap images but no primary image.

        Args:
            dry_run: when True, report the count without saving changes.
        """
        attractions = Attraction.objects.all()
        fixed_count = 0

        for attr in attractions:
            updated = False

            # Attraction has AMap images but no primary image: flag for a
            # save. NOTE(review): no field is changed here — the actual
            # primary image must come from the ``images`` relation.
            if not attr.primary_image and attr.amap_image_urls:
                if len(attr.amap_image_urls) > 0:
                    updated = True

            # Ensure amap_image_urls is a list (reset malformed values).
            if attr.amap_image_urls and not isinstance(attr.amap_image_urls, list):
                attr.amap_image_urls = []
                updated = True

            # BUG FIX: count fixes even in dry-run mode so the preview
            # reports how many records WOULD be touched (previously the
            # dry-run summary always said 0); only save for real runs.
            if updated:
                fixed_count += 1
                if not dry_run:
                    attr.save()

        self.stdout.write(self.style.SUCCESS(f'修复了 {fixed_count} 个景点的图片数据'))

