from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from django.utils import timezone
from data_analysis.models import DataSource, AnalysisTask, AnalysisResult
from data_crawler.models import CrawlerTask, CrawledData
import random
from datetime import datetime, timedelta
import json

User = get_user_model()

class Command(BaseCommand):
    help = '插入测试数据用于数据可视化'

    # Candidate hashtags attached to the generated posts.
    TOPICS = ['Python编程', '数据分析', '人工智能', '机器学习', '深度学习',
              '大数据', '云计算', '区块链', '物联网', '5G技术']

    # Number of fake posts generated per run.
    POST_COUNT = 100

    def handle(self, *args, **kwargs):
        """Seed the database with demo data for the visualization pages.

        Creates (in order): a test user, a data source, a completed crawler
        task, 100 randomized crawled posts spread over the last 30 days, a
        completed analysis task, and four canned analysis results (trend,
        topic distribution, sentiment, user activity).
        """
        user = self._get_or_create_test_user()
        data_source = self._create_data_source(user)
        crawler_task = self._create_crawler_task(user, data_source)

        # All generated timestamps fall within the trailing 30-day window.
        start_date = timezone.now() - timedelta(days=30)

        self._create_crawled_data(crawler_task, start_date)
        analysis_task = self._create_analysis_task(user, data_source, start_date)
        self._create_analysis_results(analysis_task, start_date)

        self.stdout.write(self.style.SUCCESS('Successfully inserted test data'))

    def _get_or_create_test_user(self):
        """Return the shared test user, creating it on first run.

        Uses ``get_or_create`` instead of a separate ``exists()``/``get()``
        pair so the lookup is a single idempotent step (no check-then-act
        race, one query on the common path). The password is hashed and
        stored only when the user is newly created.
        """
        user, created = User.objects.get_or_create(
            username='test_user',
            defaults={'email': 'test@example.com', 'is_active': True},
        )
        if created:
            user.set_password('test123')
            user.save(update_fields=['password'])
        return user

    def _create_data_source(self, user):
        """Create and return the Weibo data source owned by *user*."""
        return DataSource.objects.create(
            name='微博数据源',
            type='weibo',
            credentials={
                'use_proxy': False,
                'timeout': 10,
                'max_retries': 3
            },
            is_active=True,
            user=user
        )

    def _create_crawler_task(self, user, data_source):
        """Create and return a crawler task already marked as completed."""
        return CrawlerTask.objects.create(
            name='微博数据采集',
            data_source=data_source,
            status='completed',
            parameters={
                'keywords': ['Python', '数据分析', '人工智能'],
                'max_pages': 10
            },
            progress=100,
            total_items=self.POST_COUNT,
            crawled_items=self.POST_COUNT,
            user=user,
            completed_at=timezone.now()
        )

    def _create_crawled_data(self, crawler_task, start_date):
        """Generate POST_COUNT randomized crawled posts for *crawler_task*.

        Each post gets a random timestamp within the 30-day window starting
        at *start_date*, random engagement counters, and 1-3 random topics.
        """
        for i in range(self.POST_COUNT):
            created_at = start_date + timedelta(
                days=random.randint(0, 29),
                hours=random.randint(0, 23),
                minutes=random.randint(0, 59)
            )

            reposts = random.randint(0, 1000)
            comments = random.randint(0, 2000)
            likes = random.randint(0, 5000)
            post_topics = random.sample(self.TOPICS, random.randint(1, 3))

            data = {
                'post': {
                    'id': f'test_post_{i}',
                    'content': f'这是一条测试微博 #{post_topics[0]}# 分享技术经验和见解。',
                    'created_at': created_at.isoformat(),
                    'source': '微博 weibo.com',
                    'engagement': {
                        'reposts': reposts,
                        'comments': comments,
                        'likes': likes
                    },
                    'media': {
                        'pictures': [],
                        'topics': post_topics,
                        'mentions': []
                    }
                },
                'user': {
                    'id': f'test_user_{random.randint(1, 20)}',
                    'name': f'测试用户_{random.randint(1, 20)}',
                    'followers_count': random.randint(100, 10000),
                    'verified': random.choice([True, False])
                }
            }

            CrawledData.objects.create(
                task=crawler_task,
                data=data,
                metadata={
                    'platform': 'weibo',
                    'keyword': random.choice(['Python', '数据分析', '人工智能']),
                    'crawled_at': timezone.now().isoformat(),
                    # Denormalized total so dashboards can sort without
                    # digging into the nested post payload.
                    'engagement_count': reposts + comments + likes,
                    'user_id': data['user']['id']
                }
            )

    def _create_analysis_task(self, user, data_source, start_date):
        """Create and return a completed sentiment-analysis task."""
        return AnalysisTask.objects.create(
            name='微博数据分析',
            type='sentiment',
            status='completed',
            parameters={
                'data_source_id': data_source.id,
                'start_date': start_date.isoformat(),
                'end_date': timezone.now().isoformat()
            },
            progress=100,
            user=user,
            data_source=data_source,
            completed_at=timezone.now()
        )

    def _create_analysis_results(self, analysis_task, start_date):
        """Attach the four canned visualization results to *analysis_task*."""
        # Posting-trend line chart: one random count per day for 30 days.
        AnalysisResult.objects.create(
            title='发帖趋势分析',
            description='近30天发帖数量趋势',
            data={
                'dates': [(start_date + timedelta(days=i)).strftime('%Y-%m-%d')
                          for i in range(30)],
                'values': [random.randint(10, 100) for _ in range(30)]
            },
            visualization_type='line',
            task=analysis_task
        )

        # Topic-distribution pie chart over the first five topics.
        AnalysisResult.objects.create(
            title='话题分布分析',
            description='热门话题分布情况',
            data={
                'topics': self.TOPICS[:5],
                'counts': [random.randint(50, 200) for _ in range(5)]
            },
            visualization_type='pie',
            task=analysis_task
        )

        # Sentiment bar chart: positive / neutral / negative counts.
        AnalysisResult.objects.create(
            title='情感分析',
            description='内容情感倾向分析',
            data={
                'categories': ['正面', '中性', '负面'],
                'values': [random.randint(100, 300) for _ in range(3)]
            },
            visualization_type='bar',
            task=analysis_task
        )

        # User-activity scatter plot: posts vs. engagements per user.
        AnalysisResult.objects.create(
            title='用户活跃度分析',
            description='用户发帖和互动情况分析',
            data={
                'users': [f'用户{i}' for i in range(1, 6)],
                'posts': [random.randint(5, 20) for _ in range(5)],
                'engagements': [random.randint(100, 1000) for _ in range(5)]
            },
            visualization_type='scatter',
            task=analysis_task
        )