import pandas as pd
from sqlalchemy import create_engine
import numpy as np
from datetime import datetime, timedelta
from pandas import json_normalize
import json

class CheatAnalyzer:
    """Detect upload-stat cheating in tracker announce logs.

    For each user, three risk scores in [0, 1] are computed (upload speed,
    multi-IP usage, behavioral anomalies), blended into an overall risk,
    and written back to the database.
    """

    # Upload speed (MB/s) at which the speed risk saturates at 1.0.
    MAX_PLAUSIBLE_SPEED_MB = 500.0

    def __init__(self, db_uri):
        """Bind the analyzer to the database at *db_uri*."""
        self.engine = create_engine(db_uri)
        self.today = datetime.now().date()

    def run_analysis(self):
        """Fetch yesterday's logs, score every user, persist the results."""
        df = self._fetch_yesterday_data()
        if df.empty:
            # No traffic yesterday; avoid writing an empty frame to SQL.
            return

        results = [
            self._analyze_user(group.copy(), user_id)
            for user_id, group in df.groupby('user_id')
        ]
        self._save_results(results)

    def _fetch_yesterday_data(self):
        """Return announce_logs rows from the previous calendar day.

        The previous version had the date filter commented out and pulled
        the whole table. The date is bound as a query parameter (never
        interpolated into the SQL string) and the filter runs server-side.
        """
        from sqlalchemy import text  # module already depends on sqlalchemy

        yesterday = self.today - timedelta(days=1)
        query = text(
            "SELECT * FROM announce_logs WHERE DATE(timestamp) = :day"
        )
        return pd.read_sql(query, self.engine, params={"day": yesterday})

    def _analyze_user(self, user_data, userId):
        """Compute all risk scores for one user's day of announces."""
        user_data = user_data.sort_values('timestamp')

        speed_risk = self._calculate_speed_risk(user_data)
        ip_risk = self._calculate_ip_risk(user_data)
        behavior_risk = self._calculate_behavior_risk(user_data)

        # Weighted blend; speed is treated as the strongest signal.
        overall_risk = 0.4 * speed_risk + 0.3 * ip_risk + 0.3 * behavior_risk

        return {
            'user_id': userId,
            'analysis_date': self.today,
            'speed_risk': float(speed_risk),
            'ip_risk': float(ip_risk),
            'behavior_risk': float(behavior_risk),
            'overall_risk': float(overall_risk),
            'details': {
                'ip_list': user_data['ip'].unique().tolist(),
                'speed_samples': self._get_speed_samples(user_data),
                'session_count': len(self._split_sessions(user_data)),
            },
        }

    def _calculate_speed_risk(self, data):
        """Risk from implausibly high or implausibly smooth upload speeds."""
        sessions = self._split_sessions(data)

        max_speeds = []
        for session in sessions:
            if len(session) < 2:
                continue  # need at least two announces to measure a speed

            session = session.sort_values('timestamp')
            deltas = session['uploaded'].diff() / (1024 * 1024)  # bytes -> MB
            times = session['timestamp'].diff().dt.total_seconds()
            # Treat zero-second gaps as one second to avoid division by zero.
            speeds = deltas / times.replace(0, 1)

            if not speeds.empty:
                max_speeds.append(speeds.max())

        if not max_speeds:
            return 0.0

        # Linear ramp: MAX_PLAUSIBLE_SPEED_MB MB/s or faster scores 1.0.
        risk = min(max(max_speeds) / self.MAX_PLAUSIBLE_SPEED_MB, 1.0)

        # Real clients fluctuate; near-constant per-session peaks look forged.
        if len(max_speeds) > 3 and np.std(max_speeds) < 5:
            risk = max(risk, 0.7)

        return risk

    def _calculate_ip_risk(self, data):
        """Risk from the number of distinct IPs seen for one user in a day."""
        unique_ips = data['ip'].nunique()

        if unique_ips == 1:
            return 0.0
        if unique_ips == 2:
            return 0.3
        if unique_ips == 3:
            return 0.6
        return 1.0

    def _calculate_behavior_risk(self, data):
        """Risk from anomalous client behavior.

        These checks were previously commented out (so the method always
        returned 0.0 despite carrying 0.3 weight in the overall risk);
        restored here with a division-by-zero guard on the stop count.
        """
        risk = 0.0

        starts = int((data['event'] == 'start').sum())
        stops = int((data['event'] == 'stop').sum())
        # Many starts per stop suggests the client restarts sessions to
        # replay inflated stats.
        if stops > 0:
            if starts / stops > 3:
                risk = max(risk, 0.5)
        elif starts > 3:
            risk = max(risk, 0.5)

        # Announce intervals under 5 seconds are below any sane client's
        # reporting period.
        time_diffs = data['timestamp'].sort_values().diff().dt.total_seconds()
        if (time_diffs < 5).any():
            risk = max(risk, 0.6)

        return risk

    def _split_sessions(self, data):
        """Split one user's rows into sessions delimited by start/stop events."""
        sessions = []
        current_session = []

        for _, row in data.sort_values('timestamp').iterrows():
            if row['event'] == 'start':
                # A new start closes any session left dangling without a stop.
                if current_session:
                    sessions.append(pd.DataFrame(current_session))
                    current_session = []
                current_session.append(row)
            elif row['event'] == 'stop' and current_session:
                current_session.append(row)
                sessions.append(pd.DataFrame(current_session))
                current_session = []
            else:
                current_session.append(row)

        # Flush a trailing session that never saw a stop event.
        if current_session:
            sessions.append(pd.DataFrame(current_session))

        return sessions

    def _get_speed_samples(self, data):
        """Per-announce speed samples (MB/s) for front-end charting."""
        sessions = self._split_sessions(data)
        samples = []

        for session in sessions:
            if len(session) < 2:
                continue

            session = session.sort_values('timestamp')
            deltas = session['uploaded'].diff()
            times = session['timestamp'].diff().dt.total_seconds()
            # First diff per session is NaN; report it as zero speed.
            speeds = (deltas / times.replace(0, 1)).fillna(0)

            for ts, speed in zip(session['timestamp'], speeds):
                samples.append({
                    'time': ts.isoformat(),
                    'speed': speed / (1024 * 1024),  # bytes/s -> MB/s
                })

        return samples

    def _save_results(self, results):
        """Write analysis rows to the cheat_analysis_results table.

        Builds new records instead of mutating the caller's dicts, and
        serializes the nested details structure as a JSON string.
        """
        if not results:
            return

        rows = [
            {**result, 'details': json.dumps(result['details'])}
            for result in results
        ]
        pd.DataFrame(rows).to_sql(
            'cheat_analysis_results', self.engine,
            if_exists='append', index=False,
        )

