import util
import 小区
from PD import PdUtil
from fileUtil import FileUtil
from 单用户中断时长工作.Input import *
from 单用户中断时长工作.活跃用户数 import get_fttr_user

# 小区+网格+网元

"""
处理小区
底表为到达用户数这个表
1.小区表，到达用户数，明细，合并这三个
2.根据小区id分组计算中断时长，再除60除累计天数得出日均
3.除到达用户数得到单用户中断时长
"""


def handler_cell_level(avg_days, date):
    """Build the per-cell (小区) report: daily-average and per-user outage minutes.

    avg_days: number of accumulated days so far (divisor for the daily average).
    date: report date, used only to build the output file path.

    Joins the reach-user workbook with the cell master table and the accumulated
    interruption detail, then writes an xlsx sorted by per-user outage minutes.
    """
    # Reach-user counts per cell; '社区ID' is the join key throughout.
    data = pd.read_excel(reachPath, usecols=['社区ID', '到达用户数'])
    cell_data = pd.read_csv(小区.get_path(), usecols=['分公司', '社区归属网格', '中文名称', '网元内部编码'],
                            encoding='gbk')
    # Align master-table column names with the reach table / output headers.
    cell_data.rename(columns={'网元内部编码': '社区ID',
                              '中文名称': '小区名称'}, inplace=True)

    data = data.merge(cell_data, on='社区ID', how="left")

    # Total interruption seconds per cell id.
    detail = pd.read_csv(interrupt_detail_path,
                         usecols=['小区ID', '中断时长'])
    detail.rename(columns={'小区ID': '社区ID'}, inplace=True)
    cell_level = detail.groupby('社区ID')['中断时长'].sum().reset_index()
    # Seconds -> minutes, averaged over the accumulated days.
    # (.round(2) instead of calling the __round__ dunder directly.)
    cell_level['日均中断时长（分钟）'] = (cell_level['中断时长'] / 60 / avg_days).round(2)

    cell_level = data.merge(
        cell_level,
        on='社区ID', how="left")
    # Per-user outage minutes; guard against a zero reach-user count.
    cell_level['单用户中断时长(分钟)'] = cell_level.apply(
        lambda x: 0 if x['到达用户数'] == 0 else round(x['日均中断时长（分钟）'] / x['到达用户数'], 2),
        axis=1)
    cell_level = cell_level[
        ["社区ID", "小区名称", "分公司", "到达用户数", "日均中断时长（分钟）", "单用户中断时长(分钟)"]]
    cell_level = cell_level.sort_values('单用户中断时长(分钟)', ascending=False)
    # Report date goes in as the first column.
    cell_level.insert(0, "时间", statistics_date)
    # Drop rows that failed the master-table join, then zero-fill the rest.
    cell_level.dropna(subset=['分公司'], inplace=True)
    cell_level.fillna(0, inplace=True)
    FileUtil.to_xlsx(cell_level, get_cell_path(date))


def handler_net_level(avg_days, date):
    """Build the per-OLT (网元) report: daily-average and per-user outage minutes.

    avg_days: number of accumulated days so far (divisor for the daily average).
    date: report date, used only to build the output file path.
    """
    data2 = pd.read_csv(interrupt_detail_path,
                        usecols=['中断时长', 'OLTIP', '网元名称'])
    # Reach-user counts per OLT IP, derived from the PPPOE session log.
    net_level = handler_pppoe_olt_user()
    temp = data2.groupby('OLTIP')['中断时长'].sum().reset_index()
    net_level = net_level.merge(temp, on="OLTIP", how="left")
    # Seconds -> minutes, averaged over the accumulated days.
    # (.round(2) instead of calling the __round__ dunder directly.)
    net_level['日均中断时长(分钟)'] = (net_level['中断时长'] / 60 / avg_days).round(2)
    # Per-user outage minutes; guard against a zero reach-user count.
    net_level['单用户中断时长(分钟)'] = net_level.apply(
        lambda x: 0 if x['到达用户数'] == 0 else round(x['日均中断时长(分钟)'] / x['到达用户数'], 2),
        axis=1)
    # Attach 'OLTIP' / '分公司' / '网元名称' from the OLT master table.
    temp = pd.read_csv(olt_path, usecols=["管理IP", "网管侧网元名称", "网元名称", "分公司"])
    # Prefer the NMS-side name; fall back to the plain element name.
    temp["网管侧网元名称"] = temp["网管侧网元名称"].fillna(temp['网元名称'])
    del temp['网元名称']
    temp.rename(columns={'网管侧网元名称': '网元名称',
                         '管理IP': 'OLTIP'}, inplace=True)

    # Inner join: OLT IPs missing from the master table are dropped.
    net_level = net_level.merge(temp[['OLTIP', '分公司', '网元名称']], on="OLTIP")

    net_level = net_level[
        ["OLTIP", "网元名称", "分公司", "到达用户数", "日均中断时长(分钟)", "单用户中断时长(分钟)"]]
    net_level = net_level.sort_values('单用户中断时长(分钟)', ascending=False)
    net_level.insert(0, "时间", statistics_date)
    # Zero-fill remaining gaps.
    net_level.fillna(0, inplace=True)
    FileUtil.to_xlsx(net_level, get_net_path(date))


def handler_grid_level(avg_days, date):
    """Build the per-grid (网格) report: daily-average and per-user outage minutes.

    avg_days: number of accumulated days so far (divisor for the daily average).
    date: report date, used only to build the output file path.
    """
    data = pd.read_excel(reachPath, usecols=['社区ID', '到达用户数'])
    cell_data = pd.read_csv(小区.get_path(), usecols=['分公司', '社区归属网格', '中文名称', '网元内部编码'],
                            encoding='gbk')
    cell_data.rename(columns={'网元内部编码': '社区ID',
                              '中文名称': '小区名称'}, inplace=True)

    data = data.merge(cell_data, on='社区ID', how="left")
    data2 = pd.read_csv(interrupt_detail_path,
                        usecols=['小区ID', '中断时长', 'OLTIP', '网元名称'])
    data2.rename(columns={'小区ID': '社区ID'}, inplace=True)
    data2 = data2.merge(data, on='社区ID')
    # Group by the grid each cell belongs to.
    # NOTE(review): summing '到达用户数' over detail rows counts a cell's users
    # once per interruption record, not once per cell — confirm this is intended.
    grid_level = data2.groupby('社区归属网格')[['中断时长', '到达用户数']].sum().reset_index()
    # Seconds -> minutes, averaged over the accumulated days.
    # (.round(2) instead of calling the __round__ dunder directly.)
    grid_level['日均中断时长（分钟）'] = (grid_level['中断时长'] / 60 / avg_days).round(2)

    # One branch-company name per grid.
    grid_level = data[['分公司', '社区归属网格']].drop_duplicates(subset=['社区归属网格']).merge(
        grid_level,
        on='社区归属网格')
    # Per-user outage minutes; guard against a zero user count.
    grid_level['单用户中断时长(分钟)'] = grid_level.apply(
        lambda x: 0 if x['到达用户数'] == 0 else round(x['日均中断时长（分钟）'] / x['到达用户数'], 2),
        axis=1)
    grid_level = grid_level[
        ["社区归属网格", "分公司", "到达用户数", "日均中断时长（分钟）", "单用户中断时长(分钟)"]]
    grid_level = grid_level.sort_values('单用户中断时长(分钟)', ascending=False)
    grid_level.insert(0, "时间", statistics_date)
    # Drop rows without a grid, then zero-fill the rest.
    grid_level.dropna(subset=['社区归属网格'], inplace=True)
    grid_level.fillna(0, inplace=True)
    FileUtil.to_xlsx(grid_level, get_gird_path(date))


def 生成明细(date):
    """Re-export the day's detail workbook with a fixed column order."""
    headers = ["日期", "SN", "中断次数", "中断时长", "宽带账号", "OLTIP", "网元名称", "OLTPORT", "OLTIP+OLTPORT",
               'POS标准名称',
               "onuid", "小区ID", "分公司", "小区名称",
               "pppoe上线时间", "光猫型号",
               "社区归属网格"]
    detail = pd.read_excel(get_detail_path(date), usecols=headers, dtype={"宽带账号": str})
    # usecols does not guarantee order — re-select to enforce the output order.
    detail = detail[headers]
    detail['分公司'] = detail['分公司'].fillna('全市')
    out_path = get_format_detail_path(date)
    print(out_path)
    FileUtil.mkdir(out_path)
    with pd.ExcelWriter(out_path, engine='xlsxwriter') as writer:
        detail.to_excel(writer, sheet_name='Sheet1', index=False)


def 分公司(date, xlsx_date):
    """Export the branch-company four-metric sheet with the date prepended."""
    wanted = ["分公司", "单用户中断时长", "中断用户平均修复时长", "用户平均中断率", "用户平均中断次数"]
    table = pd.read_excel(get_four_path(date), usecols=wanted)
    table.insert(0, "时间", xlsx_date)
    out_path = get_company_path(date)
    print(out_path)
    xlsx_writer = PdUtil.get_writer(out_path, table)
    # Apply the numeric cell format to columns 2 through 5.
    PdUtil.set_number_format(xlsx_writer, 2, 5)
    xlsx_writer.close()


# Count, per OLT IP, the PPPOE sessions seen in roughly the last half year.
def handler_pppoe_olt_user(pppoe_path="D:\\家宽\\pppoe\\PPPOE_20250507.txt",
                           since="2024-04-07"):
    """Return a frame with columns ['OLTIP', '到达用户数'].

    pppoe_path: headerless PPPOE session log; positional columns 4 and 10 are
        the OLT IP and the session timestamp (names align with usecols because
        both have length 2).
    since: only sessions strictly after this date are counted.

    (Removed dead ``dtype={'宽带账号': str}`` — that column name is not among
    the parsed names, so the mapping had no effect.)
    """
    data = pd.read_csv(pppoe_path, usecols=[4, 10], names=["OLTIP", "时间"])
    data["时间"] = pd.to_datetime(data["时间"])
    data = data[data["时间"] > since]
    # count() on the remaining column yields sessions per OLT IP.
    data = data.groupby("OLTIP").count().reset_index()
    data.rename(columns={"时间": "到达用户数"}, inplace=True)
    return data


def 累计(date):
    """Append the day's detail workbook to the accumulated interruption CSV."""
    data = pd.read_excel(get_detail_path(date), dtype={"宽带账号": str})
    # Tag rows whose ONT model is one of the Beijing FTTR models.
    temp = pd.read_excel("D:\\中断输入\\北京八个fttr型号.xlsx", usecols=['北京fttr'])
    data = data.merge(temp, left_on='光猫型号', right_on='北京fttr', how='left')
    # Day 1 starts a fresh accumulation; otherwise load what exists so far.
    # (Renamed from `all`, which shadowed the builtin.)
    if date.day == 1:
        accumulated = pd.DataFrame()
    else:
        accumulated = pd.read_csv(interrupt_detail_path,
                                  dtype={"宽带账号": str, "社区归属网格": str, "北京fttr": str})
    data = pd.concat([accumulated, data], axis=0)
    # NOTE(review): `b` is a module-level global assigned in the __main__ block;
    # confirm it is always defined before this function runs.
    if date.day == (b - 1):
        FileUtil.to_xlsx(data, "D:\\中断输入\\输出\\6月单用户中断明细.xlsx")
    data.to_csv(interrupt_detail_path, index=False)


# Per-branch (分公司) interruption totals: count, duration, affected users.
def calculate_interruption_duration(data):
    """Aggregate interruption statistics per branch company.

    Returns the branch list from get_company() left-joined with the per-branch
    sums; the last row receives the column-wise totals.
    """
    grouped = data.groupby('分公司')
    result = grouped.agg(
        中断总次数=('中断次数', 'sum'),
        中断时长总和=('中断时长', 'sum'),
        中断用户数=('分公司', 'count'),
    ).reset_index()
    result = get_company().merge(result, on='分公司', how='left')
    # Sum every column except the branch name and fold into the last row.
    totals = result.iloc[:, 1:].sum()
    result.iloc[-1, 1:] = totals
    return result


# Gigabit subscribers: restrict the detail to accounts in the gigabit list
# and report their interruption duration per branch.
def deal_gigabitUsers(data):
    gigabit_accounts = pd.read_csv("D:\\中断输入\\gigabitUsers.csv", names=['宽带账号'], dtype=str)
    matched = data.merge(gigabit_accounts, on='宽带账号')
    stats = calculate_interruption_duration(matched)
    stats = stats[['分公司', '中断时长总和']]
    stats = stats.rename(columns={'中断时长总和': '千兆中断时长'})
    return stats


# Monthly per-branch FTTR figures: total outage duration and per-user minutes.
def deal_fttr(fttr, user):
    stats = calculate_interruption_duration(fttr)
    stats = stats.rename(columns={'中断时长总和': 'fttr中断时长总和'})
    stats = stats[['分公司', 'fttr中断时长总和']].merge(user, on='分公司', how='left')
    # Seconds -> minutes, divided by the FTTR user base.
    stats['fttr当月'] = stats['fttr中断时长总和'] / 60 / stats['fttr用户数']
    return stats


def stastic(fttr_days, xlsx_date):
    """Build the per-branch daily statistics CSV (overall, FTTR, gigabit).

    fttr_days: elapsed days in the month (divisor for the monthly FTTR average).
    xlsx_date: 'YYYY-MM-DD' string of the report day.

    NOTE(review): reads the module global `cur_date` (set in the __main__ block)
    for the output path — confirm it is always defined before this runs.
    """
    data = pd.read_csv(interrupt_detail_path,
                       usecols=['分公司', '中断时长', '中断次数', '北京fttr', '日期', '宽带账号'],
                       dtype={'宽带账号': str})
    data['分公司'] = data['分公司'].fillna('全市')

    gigabit = deal_gigabitUsers(data)
    user = get_fttr_user(fttr_days + 1)
    # Rows carrying a Beijing-FTTR model tag.
    fttr = data.dropna(subset=['北京fttr'])
    # Month-to-date per-user figure, averaged over the elapsed days.
    fttr_m = deal_fttr(fttr, user)
    fttr_m['fttr当月'] = fttr_m['fttr当月'] / fttr_days
    # Same figure for the report day only.
    fttr_d = fttr[fttr['日期'] == xlsx_date]
    fttr_d = deal_fttr(fttr_d, user)
    fttr_d.rename(columns={'fttr当月': 'fttr当日'}, inplace=True)
    data = calculate_interruption_duration(data)
    data['中断时长总和'] = data['中断时长总和'] / 3600  # seconds -> hours

    # NOTE(review): calculate_interruption_duration already merges get_company();
    # this second merge looks redundant — confirm before removing.
    data = get_company().merge(data, on='分公司', how='left')
    data = data.merge(fttr_m, on='分公司', how='left')
    data = data.merge(fttr_d, on='分公司', how='left')
    data = data.merge(gigabit, on='分公司', how='left')
    # index=False for consistency with the other CSVs this module writes.
    data.to_csv(get_statistic_path(cur_date), index=False)


'''
每个月更新fttr活跃用户数sheet
'''
# (Note above: the FTTR active-user sheet must be refreshed every month.)
if __name__ == '__main__':

    # a = day-of-month to process; b is also read as a module global by 累计()
    # to decide when to export the full-month workbook.
    a = 15
    b = a + 1
    # range(a, b) runs exactly once for day `a`; widen the range to batch days.
    for i in range(a, b):
        xlsx_date = f"2025-06-{util.get_day(i)}"  # hard-coded to June 2025
        cur_date = util.to_date(xlsx_date)  # also read as a global by stastic()
        生成明细(cur_date)
        分公司(cur_date, xlsx_date)
        累计(cur_date)
        stastic(i, xlsx_date)
        handler_cell_level(i, cur_date)
        handler_net_level(i, cur_date)
        handler_grid_level(i, cur_date)
