
# 创建数据库引擎
from sqlalchemy import create_engine, text
from datetime import datetime, timedelta
import time
import pandas as pd
import os
import json
import numpy as np
import pandas as pd
import re
from tqdm import tqdm
from datetime import time
# Database engine, created at import time and shared by all functions below.
# SECURITY NOTE(review): credentials are hard-coded in the DSN — move them to
# environment variables or a secrets store.
engine = create_engine(
   'mysql+mysqlconnector://root:Bz_202501@bj-cdb-ckq2r8ro.sql.tencentcdb.com:25622/bz_system',
    pool_recycle=3600,
    echo=False,  # SQL statement logging disabled (was True)
    isolation_level="READ COMMITTED",
    pool_pre_ping=True
)
# Maps each data folder under /mnt to the JSON header field that carries the
# record's date (a 'YYYY-MM-DD...' string, except sleep_episodes which stores
# a unix timestamp — see read_data_from_folder).
date_flag_mapping = {
    'sle_bioage': 'endDate',
    'daily_summary': 'startDate',
    'hrv_daily': 'startDate',
    'pwf_daily': 'startDate',
    'mental_daily':'startDate',
    'sleep_noon_to_noon':'startDate',
    'near_real_time':'endDate',
    'fitness_event':'startDate',
    'hrv_continuous':'startDate',
    'mental_hourly':'startDate',
    'pwf_continuous':'startDate',
    'sleep_episodes':'startTime',
}

def save_stats_to_db(df):
    """Upsert per-user/per-day folder statistics into the `folder_count` table.

    Args:
        df: DataFrame whose columns match the folder_count table
            (profile_id, date_flag, plus one count column per folder).

    The ON DUPLICATE KEY UPDATE clause makes the insert idempotent per
    (profile_id, date_flag) key, so repeated runs refresh existing rows.
    """
    if df.empty:
        print("无数据需要插入数据库。")
        return
    # Coerce numeric columns to int. fillna(0) first: float64 columns may
    # contain NaN (counts never assigned), and astype(int) raises on NaN.
    for col in df.columns:
        if df[col].dtype == 'int64' or df[col].dtype == 'float64':
            df[col] = df[col].fillna(0).astype(int)
    # 插入数据库
    table = 'folder_count'
    insert_sql = f"""
        INSERT INTO {table} (
            profile_id, date_flag, sle_bioage, daily_summary, hrv_daily, pwf_daily, mental_daily,
            sleep_noon_to_noon, near_real_time, fitness_event, hrv_continuous, mental_hourly,
            pwf_continuous, sleep_episodes, ott_analysis,near_real_time_night, ott_analysis_night
        ) VALUES (
            :profile_id, :date_flag, :sle_bioage, :daily_summary, :hrv_daily, :pwf_daily, :mental_daily,
            :sleep_noon_to_noon, :near_real_time, :fitness_event, :hrv_continuous, :mental_hourly,
            :pwf_continuous, :sleep_episodes, :ott_analysis,:near_real_time_night, :ott_analysis_night
        )
        ON DUPLICATE KEY UPDATE
            sle_bioage=VALUES(sle_bioage),
            daily_summary=VALUES(daily_summary),
            hrv_daily=VALUES(hrv_daily),
            pwf_daily=VALUES(pwf_daily),
            mental_daily=VALUES(mental_daily),
            sleep_noon_to_noon=VALUES(sleep_noon_to_noon),
            near_real_time=VALUES(near_real_time),
            fitness_event=VALUES(fitness_event),
            hrv_continuous=VALUES(hrv_continuous),
            mental_hourly=VALUES(mental_hourly),
            pwf_continuous=VALUES(pwf_continuous),
            sleep_episodes=VALUES(sleep_episodes),
            ott_analysis=VALUES(ott_analysis),
            near_real_time_night=VALUES(near_real_time_night),
            ott_analysis_night=VALUES(ott_analysis_night)
    """
    # astype(int) keeps the numpy int64 dtype, so to_dict() yields
    # numpy.int64 scalars which mysql-connector cannot bind as parameters.
    # Unwrap them to native Python ints while building the records.
    data = [
        {k: (int(v) if isinstance(v, np.integer) else v) for k, v in rec.items()}
        for rec in df.to_dict(orient='records')
    ]
    with engine.begin() as conn:
        conn.execute(text(insert_sql), data)



def read_sysuser():
    """Load the user_id -> profile_id mapping from the sys_user table.

    Returns:
        DataFrame with columns (user_id, profile_id); empty DataFrame on
        any query failure (the error is printed, not raised).
    """
    sql = "SELECT user_id,profile_id FROM sys_user "
    try:
        return pd.read_sql(sql, engine)
    except Exception as e:
        print(f"从数据库读取用户数据失败: {e}")
        # Fall back to an empty frame so callers can keep going.
        return pd.DataFrame()
def read_data_from_folder():
    """Scan the /mnt data folders and build per-user/per-day file counts.

    Walks each folder listed in date_flag_mapping for the last `static_days`
    days (directory names are 'YYYY-MM-DD'), records one row per JSON file,
    then aggregates counts per (profile_id, date_flag). Two extra counters
    are derived: near_real_time uploads whose header startTime falls in
    00:00-07:00 (near_real_time_night) and .ott files from the
    /mnt/ott_analysis manifests (ott_analysis / ott_analysis_night).

    Returns:
        DataFrame with one row per (profile_id, date_flag) and one count
        column per folder; empty DataFrame when nothing was found.
    """
    static_days = 6
    total_data = []
    for folder_name, witch_date in tqdm(date_flag_mapping.items(), desc="遍历各个folder近static_days天的数据"):
        root_path = f'/mnt/{folder_name}'
        latest_date = (datetime.now() - timedelta(days=static_days)).strftime('%Y-%m-%d')
        for date in os.listdir(root_path):  # date-named directories
            if date > latest_date:
                date_path = os.path.join(root_path, date)
                for file_id in os.listdir(date_path):  # profile_id directories
                    file_path = os.path.join(date_path, file_id)
                    for json_file in os.listdir(file_path):  # timestamp-named JSON files
                        json_path = os.path.join(file_path, json_file)
                        try:
                            with open(json_path, 'r') as f:
                                data = json.load(f)
                                if not data['data']:
                                    continue  # skip files with an empty payload
                                row = {'folder': folder_name, 'date_flag': -1, 'profile_id': file_id}
                                if folder_name == 'near_real_time':
                                    dt = data['header']['startTime']  # e.g. "08:24:16" or "08:24"
                                    if len(dt.split(':')) == 2:
                                        dt_time = datetime.strptime(dt, "%H:%M").time()
                                    else:
                                        dt_time = datetime.strptime(dt, "%H:%M:%S").time()
                                    # Uploads between 00:00 and 07:00 are counted
                                    # separately as near_real_time_night.
                                    if time(0, 0, 0) <= dt_time < time(7, 0, 0):
                                        night_row = {
                                            'folder': 'near_real_time_night',
                                            'date_flag': date,  # directory date, not header date
                                            'profile_id': file_id
                                        }
                                        total_data.append(night_row)
                                if folder_name == 'sleep_episodes':
                                    # sleep_episodes stores a unix timestamp, not a date string
                                    row['date_flag'] = datetime.fromtimestamp(int(data['header'][witch_date])).strftime('%Y-%m-%d')
                                else:
                                    row['date_flag'] = data['header'][witch_date][0:10]
                                total_data.append(row)
                        except (json.JSONDecodeError, KeyError) as e:
                            print(f"Error reading {json_path}: {e}")
                            continue
    total_data_df = pd.DataFrame(total_data)
    if total_data_df.empty:
        # No rows collected: groupby on the named columns would raise KeyError,
        # so return the empty frame to the caller instead.
        return total_data_df
    # Aggregate file counts per (profile_id, date_flag).
    result = []
    for (profile_id, date_flag), group in tqdm(total_data_df.groupby(['profile_id', 'date_flag']), desc="统计每个用户每天的文件数量"):
        row = {
            'sle_bioage': 0,
            'daily_summary': 0,
            'hrv_daily': 0,
            'pwf_daily': 0,
            'mental_daily': 0,
            'sleep_noon_to_noon': 0,
            'near_real_time': 0,
            'fitness_event': 0,
            'hrv_continuous': 0,
            'mental_hourly': 0,
            'pwf_continuous': 0,
            'sleep_episodes': 0,
            'ott_analysis': 0,
            'ott_analysis_night': 0,
            'near_real_time_night': 0,
            'date_flag': date_flag,
            'profile_id': profile_id,
        }
        folder_counts = group['folder'].value_counts()
        for folder_name in folder_counts.index:
            if folder_name in row:
                row[folder_name] = int(folder_counts[folder_name])
        result.append(row)
    result = pd.DataFrame(result)
    # ott_analysis counts come from manifest JSONs listing uploaded .ott files.
    for date_flag in tqdm(os.listdir('/mnt/ott_analysis'), desc="统计OTT分析数据"):
        if date_flag < (datetime.now() - timedelta(days=static_days)).strftime('%Y-%m-%d'):
            continue
        date_path = os.path.join('/mnt/ott_analysis', date_flag)
        for profile_id in os.listdir(date_path):
            user_path = os.path.join(date_path, profile_id)
            count = 0
            night_count = 0
            for json_name in os.listdir(user_path):
                json_path = os.path.join(user_path, json_name)
                # Guard the manifest read like the main loop above: one corrupt
                # file must not abort the whole statistics pass.
                try:
                    with open(json_path, 'r') as f:
                        data = json.load(f)
                except (json.JSONDecodeError, OSError, KeyError) as e:
                    print(f"Error reading {json_path}: {e}")
                    continue
                for file in data['files']:
                    match = re.search(r'ael_(\d+)\.ott', file['file_name'])
                    if match is None:
                        # File name does not follow the ael_<digits>.ott pattern;
                        # previously this crashed with AttributeError on .group().
                        continue
                    # First 10 digits of the embedded number = unix seconds.
                    timestamp = int(match.group(1)[0:10])
                    upload_time = datetime.fromtimestamp(timestamp).strftime('%H:%M:%S')
                    upload_time_obj = datetime.strptime(upload_time, '%H:%M:%S').time()
                    if time(0, 0, 0) <= upload_time_obj < time(7, 0, 0):
                        night_count += 1
                count = count + len(data['files'])
            result.loc[(result['profile_id'] == profile_id) & (result['date_flag'] == date_flag), 'ott_analysis'] = count
            result.loc[(result['profile_id'] == profile_id) & (result['date_flag'] == date_flag), 'ott_analysis_night'] = night_count
    print(result.head())
    return result
  

    
def hourly_stats():
    """Run one statistics pass and persist the result.

    NOTE(review): despite the name, the hourly scheduling loop is currently
    disabled — the original body was `while True: ...; break` with the
    sleep-until-next-hour logic commented out, so it ran exactly once.
    The dead loop and commented-out scheduling code have been removed;
    behavior is unchanged (a single pass). Restore a real scheduler
    (e.g. cron or a sleep loop) if hourly execution is wanted.
    """
    rows_to_insert = read_data_from_folder()
    save_stats_to_db(rows_to_insert)
    print(f"统计数据已更新于 {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")

# Script entry point: run a single statistics pass (see hourly_stats).
if __name__ == '__main__':
    hourly_stats()