import numpy as np
import pymysql
import pandas as pd
from sqlalchemy import create_engine, text, VARCHAR
import datetime
import os

def svip8_yw_feedback(file1, engine):
    """Load the daily SVIP8 business-feedback CSV and normalise its id columns.

    Args:
        file1: Path to the SVIP8_YW_FEEDBACK csv file (headerless, 12 columns).
        engine: Unused here; kept for signature parity with the other loaders.

    Returns:
        pd.DataFrame: the raw rows with fixed column names, USER_ID stripped of
        all embedded whitespace and MSISDN stripped of surrounding whitespace.

    Raises:
        FileNotFoundError: if *file1* does not exist.
    """
    if not os.path.exists(file1):
        raise FileNotFoundError(f"文件 {file1} 未找到。")

    df = pd.read_csv(file1, header=None)
    df.columns = [
        'USER_ID', 'MSISDN', 'START_DATE', 'END_DATE',
        'CELL_1', 'CELL_2', 'CELL_3', 'CALL_TIMES',
        'ABNORMAL_TIMES', 'ABNORMAL_EVENTS', 'DROP_TIMES', 'POOR_TIMES'
    ]
    # Keep USER_ID's original value, minus any embedded whitespace
    # (the old tab-padding logic was removed upstream).
    df['USER_ID'] = df['USER_ID'].astype(str).str.replace(r'\s+', '', regex=True)
    # Clean MSISDN: leading/trailing whitespace only.
    df['MSISDN'] = df['MSISDN'].astype(str).str.strip()
    return df

def svip_ux_ps_result(file2, engine):
    """Load the SVIP UX PS result CSV and aggregate metrics per msisdn.

    Reads the 23-column headerless CSV at *file2*, trims the id columns,
    coerces every metric column to numeric (treating 'inf'/'N/A' and any
    unparsable value as NaN), then sums the metrics per msisdn.

    Args:
        file2: Path to the SVIP_UX_PS_RESULT csv file (no header row).
        engine: Unused here; kept for signature parity with the other loaders.

    Returns:
        pd.DataFrame: one row per msisdn, with each metric column summed and
        the first seen stat_date kept.

    Raises:
        FileNotFoundError: if *file2* does not exist. (Previously this case
        silently returned None; raising matches svip8_yw_feedback and fails
        fast with a precise message.)
    """
    if not os.path.exists(file2):
        raise FileNotFoundError(f"文件 {file2} 未找到。")

    columns = [
        'user_id','msisdn', 'stat_date', 's_video_play_fail_times', 's_video_play_success_times',
        's_video_xkb_delay_extra_times', 's_video_xkb_start_times', 's_video_streaming_dl_lowspeed_times',
        's_video_streaming_dl_lowspeed_times_02', 'video_play_fail_times', 'video_play_success_times',
        'video_streaming_dl_lowspeed_times', 'video_streaming_dl_lowspeed_times_02', 'im_interaction_delay',
        'im_interaction_times','im_delay_ul_extra_times', 'im_delay_dl_extra_times', 'im_delay_extra_times',
        'im_delay_extra_times_02', 'web_rsp_fail_times', 'web_req_times','web_rsp_extra_times', 'web_dl_low_speed_times'
    ]
    df = pd.read_csv(file2, header=None, names=columns).dropna(how='all')
    # Strictly clean the id columns before grouping.
    df['user_id'] = df['user_id'].astype(str).str.strip()
    df['msisdn'] = df['msisdn'].astype(str).str.strip()
    # Everything except the ids and the date is a metric to be summed.
    numeric_cols = df.columns.difference(['user_id', 'msisdn', 'stat_date'])
    df[numeric_cols] = df[numeric_cols].replace(['inf', 'N/A'], np.nan).apply(pd.to_numeric, errors='coerce')
    agg_rules = {col: 'sum' for col in numeric_cols}
    agg_rules['stat_date'] = 'first'
    # NOTE: user_id is deliberately dropped here — presumably it is re-joined
    # from the feedback file during the merge step (verify against caller).
    aggregated = df.groupby('msisdn', as_index=False).agg(agg_rules)
    return aggregated


def merge_and_upload(engine, df1, df2):
    """Left-join the feedback frame with the per-msisdn UX metrics and append
    the result to the `svip8_yw_feedback` table.

    Args:
        engine: SQLAlchemy engine (or DBAPI connection) accepted by `to_sql`.
        df1: Feedback frame (output of svip8_yw_feedback).
        df2: Aggregated UX metrics frame (output of svip_ux_ps_result).

    Side effects:
        Column names of *df1* and *df2* are lower-cased in place (mutates the
        callers' frames). Rows are appended to the database table.
    """
    # Normalise both frames to lower-case column names before joining.
    df1.columns = df1.columns.str.lower()
    df2.columns = df2.columns.str.lower()

    feedback_cols = [
        'user_id', 'msisdn', 'start_date', 'end_date', 'cell_1', 'cell_2',
        'cell_3', 'call_times', 'abnormal_times', 'abnormal_events',
        'drop_times', 'poor_times',
    ]
    metric_cols = [
        's_video_play_fail_times', 's_video_play_success_times',
        's_video_xkb_delay_extra_times', 's_video_xkb_start_times',
        's_video_streaming_dl_lowspeed_times',
        's_video_streaming_dl_lowspeed_times_02',
        'video_play_fail_times', 'video_play_success_times',
        'video_streaming_dl_lowspeed_times',
        'video_streaming_dl_lowspeed_times_02',
        'im_interaction_delay', 'im_interaction_times',
        'im_delay_ul_extra_times', 'im_delay_dl_extra_times',
        'im_delay_extra_times', 'im_delay_extra_times_02',
        'web_rsp_fail_times', 'web_req_times',
        'web_rsp_extra_times', 'web_dl_low_speed_times',
    ]

    merged = df1.merge(df2, on='msisdn', how='left')
    merged = merged[feedback_cols + metric_cols]

    # Append in batches. NOTE(review): 'append' does NOT resolve primary-key
    # conflicts — duplicate keys will raise on the database side.
    merged.to_sql(
        'svip8_yw_feedback',
        engine,
        if_exists='append',
        index=False,
        method='multi',
        chunksize=200
    )
    print("数据已成功写入数据库。")


if __name__ == '__main__':
    # NOTE(security): database credentials are hard-coded in the DSN below —
    # consider moving them into environment variables or a config file.
    db_engine = create_engine('mysql+pymysql://5gzhyy:B6.5gzhyy312@132.91.175.98:8067/g41_wyzx_5gzhyydb')
    base_dir = '/oss_luoshen/ywhx/SVIP_gz_table/from_jizuo/'

    # Today's date stamp; hardcode e.g. '20250623' here to re-run a past day.
    stamp = datetime.datetime.now().strftime("%Y%m%d")

    feedback_csv = f"{base_dir}SVIP8_YW_FEEDBACK_{stamp}.csv"
    ps_csv = f"{base_dir}SVIP_UX_PS_RESULT_{stamp}.csv"

    yw_df = svip8_yw_feedback(feedback_csv, db_engine)
    ps_df = svip_ux_ps_result(ps_csv, db_engine)

    # Guard against a loader returning nothing before merging.
    if yw_df is None or ps_df is None:
        raise ValueError("输入数据无效，请检查文件内容或处理逻辑。")

    merge_and_upload(db_engine, yw_df, ps_df)
    print(f"[{datetime.datetime.now()}] 数据同步完成")
    print("=" * 50)  # log separator