import math
import os
import time

import influxdb_client
import xlwt
from influxdb_client import InfluxDBClient, Point, WritePrecision
import pandas as pd
from datetime import datetime, timezone, timedelta


def datetime_str2UTC(Y, m, d, H, M, S):
    """Convert a local (system-timezone) date/time into a UTC timestamp string.

    The six components are interpreted in the machine's local timezone,
    converted to UTC, and rendered as "YYYY-MM-DDTHH:MM:SSZ".
    """
    local_dt = datetime(Y, m, d, H, M, S)
    return local_dt.astimezone(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")


def total(df):
    """Add month-to-date running averages of the 'value' column to `df`.

    Mutates and returns `df`, adding three columns:
      'Monthly_Avg_To_Date'           — monthly cumulative sum divided by the
                                        FULL month's row count (kept for
                                        backward compatibility; NOT a true
                                        to-date mean),
      'Days_In_Month_To_Date'         — 1-based row counter within each month,
      'Monthly_Avg_To_Date_Corrected' — true running mean up to each row.

    Args:
        df: DataFrame with a parseable 'Date' column and a numeric 'value'
            column. 'Date' is converted to datetime in place.

    Returns:
        The same DataFrame, with the columns above added.
    """
    # Parse dates so rows can be grouped by calendar month.
    df['Date'] = pd.to_datetime(df['Date'])

    # Build the month grouping once instead of re-grouping four times.
    by_month = df.groupby(df['Date'].dt.to_period('M'))

    # Shared intermediates: running sum, full-month size, and 1-based
    # position of each row within its month.
    month_cumsum = by_month['value'].cumsum()
    month_size = by_month['Date'].transform('size')
    day_number = by_month['Date'].cumcount() + 1

    # Legacy column: divides by the whole month's size, so it is not a
    # genuine to-date average — see the corrected column below.
    df['Monthly_Avg_To_Date'] = month_cumsum / month_size

    # Corrected to-date average: cumulative sum over rows seen so far.
    df['Days_In_Month_To_Date'] = day_number
    df['Monthly_Avg_To_Date_Corrected'] = month_cumsum / day_number
    return df

def cai_pf(p1, p2, q1, q2):
    """Compute the power factor over an interval from energy readings.

    Given active-energy readings p1→p2 and reactive-energy readings q1→q2,
    the power factor is |ΔP| / sqrt(ΔP² + ΔQ²), which is always in [0, 1].

    BUG FIX: the previous version returned ΔP² / sqrt(ΔP² + ΔQ²), which can
    exceed 1 (e.g. ΔP=30, ΔQ=40 gave 18.0) — impossible for a power factor
    and wrong for the downstream "> 0.9" compliance checks.

    Args:
        p1, p2: active-energy readings at the start/end of the interval.
        q1, q2: reactive-energy readings at the start/end of the interval.

    Returns:
        The power factor in [0, 1]; 0 when both deltas are zero.
    """
    dp = p2 - p1
    dq = q2 - q1
    # Apparent-energy magnitude; hypot(dp, dq) == sqrt(dp**2 + dq**2).
    s = math.hypot(dp, dq)
    if s == 0:
        return 0
    return abs(dp) / s


def get_day(start_str = "2024-09-02T16:00:00.000Z", end_str = "2024-09-07T16:00:00.000Z"):
    """Compute the daily and day-cumulative actual power factor per day.

    For each day in [start_str, end_str] two InfluxDB queries are issued
    (active energy Wp and reactive energy Wq of measurement t_YZZS_302B),
    then cai_pf() is applied to the first/last samples of the day ('value')
    and from zero to the last sample ('Monthly_Avg_To_Date_Corrected').

    Args:
        start_str: range start, UTC string in "%Y-%m-%dT%H:%M:%S.%fZ" form.
        end_str:   range end, same format.

    Returns:
        pd.DataFrame with columns 'Date', 'value',
        'Monthly_Avg_To_Date_Corrected'. Days where the Wp query returns no
        rows are skipped entirely.
    """

    start_time = datetime.strptime(start_str, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=timezone.utc)
    # start_time = start_time - timedelta(hours=8)
    end_time = datetime.strptime(end_str, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=timezone.utc)

    # Number of loop iterations; +1 so the end date itself is included.
    delta = end_time - start_time
    days = delta.days + 1  # +1 to include the end date

    # Accumulator for one result row per day (result schema below).
    df0 = pd.DataFrame(
        columns=['Date', 'value', 'Monthly_Avg_To_Date_Corrected'],  # result schema
    )

    # Iterate over each day of the range.
    for day in range(days):
        # Date currently being processed.
        current_day = start_time + timedelta(days=day)

        # Start-of-day bound: midnight of the current day, clamped to the
        # overall range start for the first (partial) day.
        day_start = max(current_day.replace(hour=0, minute=0, second=0, microsecond=0), start_time)
        utc_dt = day_start.astimezone(timezone.utc)
        formatted_start = utc_dt.strftime("%Y-%m-%dT%H:%M:%SZ")
        # End-of-day bound, clamped to the overall range end for the last
        # (partial) day.
        # NOTE(review): microsecond=99 is 0.000099 s — the original comment
        # said 23:59:59.999, so 999000 (or 999999) was presumably intended;
        # confirm. (Harmless here: strftime below drops sub-second parts.)
        day_end = min(current_day.replace(hour=23, minute=59, second=59, microsecond=99), end_time)
        utc_dt = day_end.astimezone(timezone.utc)
        formatted_end = utc_dt.strftime("%Y-%m-%dT%H:%M:%SZ")

        # Flux query: 15-second means of the active-energy measurement.
        query = f'''
                from(bucket: "zzz")
                  |> range(start: {formatted_start}, stop: {formatted_end})
                  |> filter(fn: (r) => r["_measurement"] == "t_YZZS_302B_Wp")
                  |> aggregateWindow(every: 15s, fn: mean, createEmpty: false)
                  |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
                  |> yield(name: "mean")
            '''
        # Log the day window being queried.
        print(f"日期: {current_day.date()}, 日初时间: {formatted_start}, 日末时间: {formatted_end}")
        # NOTE(review): credentials are hard-coded and a new client is opened
        # per day; consider hoisting the client out of the loop.
        client = InfluxDBClient(url='http://127.0.0.1:8086/', username='TJEDI', password='87654321', database='zzz')
        query_api = client.query_api()
        tables_q1 = query_api.query_data_frame(org='zhongtie', query=query)

        # Same window for the reactive-energy measurement.
        query = f'''
                        from(bucket: "zzz")
                          |> range(start: {formatted_start}, stop: {formatted_end})
                          |> filter(fn: (r) => r["_measurement"] == "t_YZZS_302B_Wq")
                          |> aggregateWindow(every: 15s, fn: mean, createEmpty: false)
                          |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
                          |> yield(name: "mean")
                    '''
        tables_q2 = query_api.query_data_frame(org='zhongtie', query=query)
        client.close()
        if len(tables_q1) != 0:
            # Daily PF: delta between the day's first and last samples.
            temp1 = cai_pf(round(tables_q1["value"][0], 2), round(tables_q1["value"][len(tables_q1)-1], 2),
                  round(tables_q2["value"][0], 2), round(tables_q2["value"][len(tables_q2) - 1], 2))
            # Day-cumulative PF: delta from zero to the day's last sample.
            temp2 = cai_pf(round(0, 2), round(tables_q1["value"][len(tables_q1) - 1], 2),
                           round(0, 2), round(tables_q2["value"][len(tables_q2) - 1], 2))

            new_row = {'Date': current_day.date(), 'value': temp1, 'Monthly_Avg_To_Date_Corrected': temp2}
            df0 = pd.concat([df0, pd.DataFrame([new_row])], ignore_index=True)
    return df0

def get_month(start_str = "2024-09-02T16:00:00.000Z", time_str = "2024-09-07T16:00:00.000Z"):
    """Compute the actual monthly power factor for every month of time_str's
    year, from January up to and including time_str's month.

    NOTE(review): start_str is accepted for signature symmetry with
    get_day() but is never used — only time_str drives the month windows.

    Args:
        start_str: unused.
        time_str:  reference timestamp, UTC string "%Y-%m-%dT%H:%M:%S.%fZ".

    Returns:
        pd.DataFrame with columns 'Date' (first day of each month), 'value'
        and 'Monthly_Avg_To_Date_Corrected' (both carry the same monthly PF).
        Months where the Wp query returns no rows are skipped.
    """
    original_time = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=timezone.utc)

    # 1. Midnight at the start of the current month.
    first_day_of_month = original_time.replace(day=1, hour=0, minute=0, second=0, microsecond=0)

    # 2. Latest moment of the current day (23:59:59.999999).
    last_moment_of_day = original_time.replace(hour=23, minute=59, second=59, microsecond=999999)

    # 3. Build (first-moment, last-moment) bounds for each month of the year
    #    from January through the current month.
    year = original_time.year
    month = original_time.month
    previous_months = []

    for m in range(1, month + 1):
        # First moment of month m.
        first_day = datetime(year, m, 1, 0, 0, 0, tzinfo=timezone.utc)
        # Month-end (February needs leap-year handling).
        # NOTE(review): when the current month IS February the `m == 2`
        # branch wins, so last_moment_of_day is never used for it — confirm
        # that is intended. Also microsecond=99 is 0.000099 s, not .999 s
        # (harmless: strftime below drops sub-second parts).
        if m == 2:
            # Leap-year February has 29 days.
            if (year % 4 == 0 and year % 100 != 0) or (year % 400 == 0):
                last_day = datetime(year, 2, 29, 23, 59, 59, 99, tzinfo=timezone.utc)
            else:
                last_day = datetime(year, 2, 28, 23, 59, 59, 99, tzinfo=timezone.utc)
        elif m == month:
            # Current (partial) month ends at the reference day's last moment.
            print(month)
            print(last_moment_of_day)
            last_day = last_moment_of_day
        else:
            # Other months: first day of the next month minus one day.
            last_day = datetime(year, m + 1, 1, 0, 0, 0, tzinfo=timezone.utc) - timedelta(days=1)
            last_day = last_day.replace(hour=23, minute=59, second=59, microsecond=99)

        previous_months.append((first_day, last_day))

    # Debug output of the computed bounds.
    print("当月月初零点时间:", first_day_of_month)
    print("当日最新时间:", last_moment_of_day)
    print("当月之前每个月的月初和月末时间:")

    # Accumulator for one result row per month (result schema below).
    df0 = pd.DataFrame(
        columns=['Date', 'value', 'Monthly_Avg_To_Date_Corrected'],  # result schema
    )
    for first, last in previous_months:

        formatted_start = first.strftime("%Y-%m-%dT%H:%M:%SZ")
        formatted_end = last.strftime("%Y-%m-%dT%H:%M:%SZ")
        print(f"日初时间: {formatted_start}, 日末时间: {formatted_end}")
        # Flux query: 15-second means of the active-energy measurement.
        query = f'''
                        from(bucket: "zzz")
                          |> range(start: {formatted_start}, stop: {formatted_end})
                          |> filter(fn: (r) => r["_measurement"] == "t_YZZS_302B_Wp")
                          |> aggregateWindow(every: 15s, fn: mean, createEmpty: false)
                          |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
                          |> yield(name: "mean")
                    '''
        # NOTE(review): hard-coded credentials; client re-opened per month.
        client = InfluxDBClient(url='http://127.0.0.1:8086/', username='TJEDI', password='87654321', database='zzz')
        query_api = client.query_api()
        tables_q1 = query_api.query_data_frame(org='zhongtie', query=query)

        # Same window for the reactive-energy measurement.
        query = f'''
                                from(bucket: "zzz")
                                  |> range(start: {formatted_start}, stop: {formatted_end})
                                  |> filter(fn: (r) => r["_measurement"] == "t_YZZS_302B_Wq")
                                  |> aggregateWindow(every: 15s, fn: mean, createEmpty: false)
                                  |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
                                  |> yield(name: "mean")
                            '''
        tables_q2 = query_api.query_data_frame(org='zhongtie', query=query)
        client.close()
        if len(tables_q1) != 0:
            # Monthly PF from the month's first and last samples.
            temp = cai_pf(round(tables_q1["value"][0], 2), round(tables_q1["value"][len(tables_q1) - 1], 2),
                          round(tables_q2["value"][0], 2), round(tables_q2["value"][len(tables_q2) - 1], 2))
            new_row = {'Date': first.date(), 'value': temp, 'Monthly_Avg_To_Date_Corrected': temp}
            print(new_row)
            df0 = pd.concat([df0, pd.DataFrame([new_row])], ignore_index=True)
    return df0


def main(startTime, stopTime, type):
    """Build the power-factor Excel workbook for [startTime, stopTime].

    Args:
        startTime / stopTime: time-range strings (forwarded to get_day /
            get_month and interpolated directly into the Flux queries).
        type: 'POST' → save the workbook under ./dist and return its path;
            any other value → return the four row-buffer dicts
            (daily, monthly, operation log, strategy log).
            NOTE(review): this parameter shadows the builtin `type`.

    Side effects: runs several InfluxDB queries and writes an Excel file.
    """
    def change_item(table, row, content):
        # Append one operation-log row to sheet 2 for every change in
        # `table['value']`, using the first sample as the baseline.
        # NOTE(review): the Data3['first'] timestamp below is taken from the
        # closed-over tables_tf, not from `table` — looks like a copy/paste
        # bug (the Excel cell uses table['_time']); confirm.
        original = table['value'][0]
        for item in range(1, table['value'].shape[0]):
            if table['value'][item] != original:
                row += 1
                workbook_tg2.write(row, 0,
                                   str(table['_time'][item].tz_convert('Asia/Shanghai').strftime('%Y-%m-%d %H:%M:%S')),
                                   style_h3)
                workbook_tg2.write(row, 1, content, style_h3)
                # workbook_tg2.write(row, 2, '能馈装置', style_h3)
                workbook_tg2.write(row, 2, '能馈无功输出上限', style_h3)
                t = '由' + str(original) + '修改为' + str(table['value'][item])
                workbook_tg2.write(row, 3, t, style_h3)
                Data3['first'].append(
                    str(tables_tf['_time'][item].tz_convert('Asia/Shanghai').strftime('%Y-%m-%d %H:%M:%S')))
                Data3['second'].append(content)
                Data3['third'].append('能馈无功输出上限')
                Data3['forth'].append(t)
                original = table['value'][item]
        return row
    # --- initialisation: open the client and run all Flux queries up front ---
    # NOTE(review): hard-coded credentials.
    client = InfluxDBClient(url='http://127.0.0.1:8086/', username='TJEDI', password='87654321', database='zzz')
    query = f'''
        from(bucket: "zzz")
          |> range(start: {startTime}, stop: {stopTime})
          |> filter(fn: (r) => r["name"] == "Target_Factor")
          |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
          |> yield(name: "pivoted_data")
    '''
    query_api = client.query_api()
    # Target ("expected") power-factor setpoint history.
    tables_tf = query_api.query_data_frame(org='zhongtie', query=query)
    query = f'''
        from(bucket: "zzz")
          |> range(start: {startTime}, stop: {stopTime})
          |> filter(fn: (r) => r["name"] == "YZZS_301B_PF")
          |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
          |> yield(name: "pivoted_data")
    '''
    # Measured PF series for 301B (used for the monthly grouping below).
    tables_pf301 = query_api.query_data_frame(org='zhongtie', query=query)

    query = f'''
            from(bucket: "zzz")
              |> range(start: {startTime}, stop: {stopTime})
              |> filter(fn: (r) => r["name"] == "YZZS_302B_PF")
              |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
              |> yield(name: "pivoted_data")
        '''
    # NOTE(review): tables_pf302 is queried but never used afterwards.
    tables_pf302 = query_api.query_data_frame(org='zhongtie', query=query)

    query = f'''
        from(bucket: "zzz")
          |> range(start: {startTime}, stop: {stopTime})
          |> filter(fn: (r) => r["name"] == "Mode_Type")
          |> aggregateWindow(every: 10s, fn: mean, createEmpty: false)
          |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
          |> yield(name: "pivoted_data")
    '''
    # Compensation-mode history (0/1/2/other — see the strategy sheet below).
    tables_type = query_api.query_data_frame(org='zhongtie', query=query)

    query = f'''
        from(bucket: "zzz")
          |> range(start: {startTime}, stop: {stopTime})
          |> filter(fn: (r) => r["name"] == "Q1limit")
          |> aggregateWindow(every: 10s, fn: mean, createEmpty: false)
          |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
          |> yield(name: "pivoted_data")
    '''
    # Reactive-output upper-limit histories of the three regeneration devices.
    tables_q1 = query_api.query_data_frame(org='zhongtie', query=query)

    query = f'''
        from(bucket: "zzz")
          |> range(start: {startTime}, stop: {stopTime})
          |> filter(fn: (r) => r["name"] == "Q2limit")
          |> aggregateWindow(every: 10s, fn: mean, createEmpty: false)
          |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
          |> yield(name: "pivoted_data")
    '''
    tables_q2 = query_api.query_data_frame(org='zhongtie', query=query)

    query = f'''
        from(bucket: "zzz")
          |> range(start: {startTime}, stop: {stopTime})
          |> filter(fn: (r) => r["name"] == "Q3limit")
          |> aggregateWindow(every: 10s, fn: mean, createEmpty: false)
          |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
          |> yield(name: "pivoted_data")
    '''
    tables_q3 = query_api.query_data_frame(org='zhongtie', query=query)

    client.close()

    # Extract the date part of each timestamp into a new column.
    tables_tf["Date"] = pd.to_datetime(tables_tf["_time"]).dt.date
    # Group by date and average the target factor per day.
    daily_averages_tf = tables_tf.groupby("Date")["value"].mean().reset_index()


    # NOTE(review): both buses are fed by the same get_day() call with the
    # same arguments, so the 301 and 302 daily tables are identical —
    # confirm whether 301 should use a different measurement.
    daily_averages_pf301 = get_day(startTime, stopTime)
    daily_averages_pf302 = get_day(startTime, stopTime)

    # #    -----------------------------divider-----------------------------------
    workbook_new = xlwt.Workbook()
    # NOTE(review): xlwt produces legacy .xls content; naming the file .xlsx
    # may confuse some Excel versions — confirm intent.
    new_file_name = ".xlsx"
    now = str(datetime.now()).replace(' ', '').replace(':', '-') + new_file_name
    basepath = os.getcwd()
    target_file_path = os.path.join(basepath, "dist", now)
    workbook_tg = workbook_new.add_sheet('功率因数报表', cell_overwrite_ok=True)
    row = 0
    #
    # Title style: centred, 12 pt bold.
    style_title = xlwt.XFStyle()
    # Alignment object
    alignment = xlwt.Alignment()
    # Centre horizontally and vertically
    alignment.horz = xlwt.Alignment.HORZ_CENTER
    alignment.vert = xlwt.Alignment.VERT_CENTER
    # Attach the alignment to the style
    style_title.alignment = alignment
    # Font object: 12 pt bold (height unit is 1/20 of a point)
    font_12 = xlwt.Font()
    font_12.name = 'Arial'  # font name (optional)
    font_12.height = 12 * 20  # 12 pt (height unit is 1/20 pt)
    font_12.bold = True  # bold
    style_title.font = font_12
    # Column widths (unit: 1/256 of a character width).
    workbook_tg.col(0).width = 256 * 12
    workbook_tg.col(1).width = 256 * 20
    workbook_tg.col(2).width = 256 * 16
    workbook_tg.col(3).width = 256 * 16
    workbook_tg.col(4).width = 256 * 18
    workbook_tg.col(5).width = 256 * 16
    workbook_tg.col(6).width = 256 * 16

    # Daily report header block.
    workbook_tg.write_merge(row, row, 0, 6, '功率因数日记录报表', style_title)
    # Cell style: centred, 10 pt (borders added below).
    style_h3 = xlwt.XFStyle()
    alignment = xlwt.Alignment()
    alignment.horz = xlwt.Alignment.HORZ_CENTER
    alignment.vert = xlwt.Alignment.VERT_CENTER
    style_h3.alignment = alignment
    font_10 = xlwt.Font()
    font_10.height = 10 * 20
    style_h3.font = font_10
    workbook_tg.write(row + 1, 0, '产生日期:', style_h3)
    time_now = time.strftime('%Y-%m-%d', time.localtime())
    workbook_tg.write_merge(row + 1, row + 1, 2, 6, time_now, style_h3)
    workbook_tg.write(row + 2, 0, '时间:', style_h3)
    time_temp = startTime + ' 至 ' + stopTime
    workbook_tg.write_merge(row + 2, row + 2, 2, 6, time_temp, style_h3)

    # Border style for data cells
    borders = xlwt.Borders()
    # Thin border on all four sides
    borders.left = xlwt.Borders.THIN
    borders.right = xlwt.Borders.THIN
    borders.top = xlwt.Borders.THIN
    borders.bottom = xlwt.Borders.THIN
    style_h3.borders = borders

    # Daily table column headers.
    workbook_tg.write(row + 3, 0, '日期:', style_h3)
    workbook_tg.write(row + 3, 1, '考核点', style_h3)
    workbook_tg.write(row + 3, 2, '期望功率因数', style_h3)
    workbook_tg.write(row + 3, 3, '实际功率因数', style_h3)
    workbook_tg.write(row + 3, 4, '当日是否达标', style_h3)
    workbook_tg.write(row + 3, 5, '累计功率因数', style_h3)
    workbook_tg.write(row + 3, 6, '累计是否达标', style_h3)
    row += 4
    # Row buffers for the JSON response: Data = daily sheet, Data2 = monthly
    # sheet, Data3 = operation log, Data4 = strategy log.
    Data = {'first':[],'second': [],'third':[],'forth':[],'fifth':[],'sixth':[],'seventh':[]}
    Data2 = {'first': [],'second': [], 'third': [], 'forth': [], 'fifth': []}
    Data3 = {'first': [], 'second': [], 'third': [], 'forth': []}
    Data4 = {'first': [], 'second': [], 'third': []}
    # Two rows per day: bus section Ⅰ (pf301) and bus section Ⅱ (pf302).
    # NOTE(review): assumes daily_averages_pf301/302 have at least as many
    # rows as daily_averages_tf — get_day() skips empty days, so confirm.
    for i in range(daily_averages_tf.shape[0]):
        workbook_tg.write(row, 0, daily_averages_tf.iloc[i]['Date'].strftime("%Y-%m-%d"), style_h3)
        workbook_tg.write(row, 1, '鱼珠主变电所33kV Ⅰ段母线进线', style_h3)
        workbook_tg.write(row, 2, str(round(daily_averages_tf.iloc[i]['value'], 3)), style_h3)

        workbook_tg.write(row, 3, str(round(daily_averages_pf301.iloc[i]['value'], 3)), style_h3)

        Data['first'].append(daily_averages_tf.iloc[i]['Date'].strftime("%Y-%m-%d"))
        Data['second'].append('鱼珠主变电所33kV Ⅰ段母线进线')
        Data['third'].append(str(round(daily_averages_tf.iloc[i]['value'], 3)))
        Data['forth'].append(str(round(daily_averages_pf301.iloc[i]['value'], 3)))
        # 0.9 is the compliance threshold for the daily PF.
        if float(daily_averages_pf301.iloc[i]['value']) > 0.9:
            workbook_tg.write(row, 4, '已达标', style_h3)
            Data['fifth'].append('已达标')
        else:
            workbook_tg.write(row, 4, '未达标', style_h3)
            Data['fifth'].append('未达标')
        workbook_tg.write(row, 5, str(round(daily_averages_pf301.iloc[i]['Monthly_Avg_To_Date_Corrected'], 3)), style_h3)
        Data['sixth'].append(str(round(daily_averages_pf301.iloc[i]['Monthly_Avg_To_Date_Corrected'], 3)))
        if float(daily_averages_pf301.iloc[i]['Monthly_Avg_To_Date_Corrected']) > 0.9:
            workbook_tg.write(row, 6, '已达标', style_h3)
            Data['seventh'].append('已达标')
        else:
            workbook_tg.write(row, 6, '未达标', style_h3)
            Data['seventh'].append('未达标')
        row += 1

        # Second row of the pair: bus section Ⅱ, same layout.
        workbook_tg.write(row, 0, daily_averages_tf.iloc[i]['Date'].strftime("%Y-%m-%d"), style_h3)
        workbook_tg.write(row, 1, '鱼珠主变电所33kV Ⅱ段母线进线', style_h3)
        workbook_tg.write(row, 2, str(round(daily_averages_tf.iloc[i]['value'], 3)), style_h3)
        workbook_tg.write(row, 3, str(round(daily_averages_pf302.iloc[i]['value'], 3)), style_h3)
        Data['first'].append(daily_averages_tf.iloc[i]['Date'].strftime("%Y-%m-%d"))
        Data['second'].append('鱼珠主变电所33kV Ⅱ段母线进线')
        Data['third'].append(str(round(daily_averages_tf.iloc[i]['value'], 3)))
        Data['forth'].append(str(round(daily_averages_pf302.iloc[i]['value'], 3)))
        if float(daily_averages_pf302.iloc[i]['value']) > 0.9:
            workbook_tg.write(row, 4, '已达标', style_h3)
            Data['fifth'].append('已达标')
        else:
            workbook_tg.write(row, 4, '未达标', style_h3)
            Data['fifth'].append('未达标')
        workbook_tg.write(row, 5, str(round(daily_averages_pf302.iloc[i]['Monthly_Avg_To_Date_Corrected'], 3)),
                          style_h3)
        Data['sixth'].append(str(round(daily_averages_pf302.iloc[i]['Monthly_Avg_To_Date_Corrected'], 3)))
        if float(daily_averages_pf302.iloc[i]['Monthly_Avg_To_Date_Corrected']) > 0.9:
            workbook_tg.write(row, 6, '已达标', style_h3)
            Data['seventh'].append('已达标')
        else:
            workbook_tg.write(row, 6, '未达标', style_h3)
            Data['seventh'].append('未达标')
        row += 1

    # Monthly report header block, appended below the daily table.
    row += 1
    workbook_tg.write_merge(row, row, 0, 6, '功率因数月记录报表', style_title)
    workbook_tg.write(row + 1, 0, '产生日期:', style_h3)
    time_now = time.strftime('%Y-%m-%d', time.localtime())
    workbook_tg.write_merge(row + 1, row + 1, 1, 4, time_now, style_h3)
    workbook_tg.write(row + 2, 0, '时间:', style_h3)
    time_temp = startTime + ' 至 ' + stopTime
    workbook_tg.write_merge(row + 2, row + 2, 1, 4, time_temp, style_h3)
    workbook_tg.write(row + 3, 0, '日期:', style_h3)
    workbook_tg.write(row + 3, 1, '考核点', style_h3)
    workbook_tg.write(row + 3, 2, '考核功率因数', style_h3)
    workbook_tg.write(row + 3, 3, '实际功率因数', style_h3)
    workbook_tg.write(row + 3, 4, '是否达标', style_h3)
    row += 4

    # NOTE(review): alias, not a copy — the tz-stripping below also mutates
    # tables_pf301.
    df_filtered301 = tables_pf301

    df_filtered301['_time'] = pd.to_datetime(df_filtered301['_time'])
    df_filtered301['_time'] = df_filtered301['_time'].dt.tz_localize(None)
    # Per-month mean of the 301B PF series (drives the loop length only).
    monthly_averages301 = df_filtered301.groupby(df_filtered301['_time'].dt.to_period('M'))['value'].mean().reset_index()

    monthly_averages302 = get_month(startTime, stopTime)
    # NOTE(review): iterates monthly_averages301's row count but indexes
    # monthly_averages302 — raises if the two differ in length. Also `row`
    # is never incremented in this loop, so with cell_overwrite_ok=True each
    # month overwrites the same Excel row (the Data2 buffers keep all rows);
    # confirm whether a `row += 1` is missing.
    for i in range(monthly_averages301.shape[0]):
        workbook_tg.write(row, 0, str(monthly_averages302.iloc[i]['Date']), style_h3)
        workbook_tg.write(row, 1, '鱼珠主变电所33kV Ⅱ段母线进线', style_h3)
        workbook_tg.write(row, 2, '0.90', style_h3)
        workbook_tg.write(row, 3, str(round(monthly_averages302.iloc[i]['value'], 4)), style_h3)
        Data2['first'].append(str(monthly_averages302.iloc[i]['Date'].strftime("%Y-%m-%d")))
        Data2['second'].append('鱼珠主变电所33kV Ⅱ段母线进线')
        Data2['third'].append('0.90')
        Data2['forth'].append(str(round(monthly_averages302.iloc[i]['value'], 4)))
        if float(monthly_averages302.iloc[i]['value']) > 0.9:
            workbook_tg.write(row, 4, '已达标', style_h3)
            Data2['fifth'].append('已达标')
        else:
            workbook_tg.write(row, 4, '未达标', style_h3)
            Data2['fifth'].append('未达标')
    # # #    -----------------------------divider-----------------------------------
    # Sheet 2: operation log.
    row = 0
    workbook_tg2 = workbook_new.add_sheet('操作记录', cell_overwrite_ok=True)
    workbook_tg2.col(0).width = 256 * 20
    workbook_tg2.col(1).width = 256 * 28
    workbook_tg2.col(2).width = 256 * 24
    workbook_tg2.col(3).width = 256 * 22
    #
    #
    workbook_tg2.write_merge(row, row, 0, 3, '网络动态无功平衡控制操作记录', style_title)
    workbook_tg2.write(row + 1, 0, '日期:', style_h3)
    workbook_tg2.write(row + 1, 1, '设备位置', style_h3)
    workbook_tg2.write(row + 1, 2, '参数类型', style_h3)
    workbook_tg2.write(row + 1, 3, '操作记录', style_h3)
    row += 1

    # Log every change of the target power factor (same change-detection
    # pattern as change_item, inlined here for the target-factor series).
    original_tf = tables_tf['value'][0]
    for item in range(1, tables_tf['value'].shape[0]):
        if tables_tf['value'][item] != original_tf:
            row += 1
            workbook_tg2.write(row, 0,
                               str(tables_tf['_time'][item].tz_convert('Asia/Shanghai').strftime('%Y-%m-%d %H:%M:%S')),
                               style_h3)
            workbook_tg2.write(row, 1, '110kV鱼珠主变电所ⅠⅡ段进线', style_h3)
            workbook_tg2.write(row, 2, '期望功率因数', style_h3)
            t = '由' + str(original_tf) + '修改为' + str(tables_tf['value'][item])
            workbook_tg2.write(row, 3, t, style_h3)
            Data3['first'].append(str(tables_tf['_time'][item].tz_convert('Asia/Shanghai').strftime('%Y-%m-%d %H:%M:%S')))
            Data3['second'].append('110kV鱼珠主变电所ⅠⅡ段进线')
            Data3['third'].append('期望功率因数')
            Data3['forth'].append(t)
            original_tf = tables_tf['value'][item]

    # Append the reactive-limit change logs of the three devices.
    row = change_item(tables_q1, row, '裕丰围Ⅰ段母线能馈装置')
    row = change_item(tables_q2, row, '科丰路Ⅱ段母线能馈装置')
    row = change_item(tables_q3, row, '水西北Ⅱ段母线能馈装置')



     # ---------------------------------divider-------------------------------
    # Strategy section, written into columns 5-7 of the same sheet.
    row = 0
    # workbook_tg3 = workbook_new.add_sheet('策略', cell_overwrite_ok=True)
    workbook_tg3 = workbook_tg2
    workbook_tg3.col(5).width = 256 * 24
    workbook_tg3.col(6).width = 256 * 24
    workbook_tg3.col(7).width = 256 * 40

    workbook_tg3.write_merge(row, row, 5, 7, '拟采用各无功补偿策略设置值', style_title)
    workbook_tg3.write(row + 1, 5, '位置', style_h3)
    workbook_tg3.write(row + 1, 6, '模式', style_h3)
    workbook_tg3.write(row + 1, 7, '时间段', style_h3)
    row += 1

    # Emit one row per compensation-mode interval: the mode value that was
    # active from start_period until the sample where the value changed.
    original_item = tables_type['value'][0]
    start_period = str(tables_type['_time'][0].tz_convert('Asia/Shanghai').strftime('%Y-%m-%d %H:%M:%S'))
    for item in range(1,tables_type['value'].shape[0]):
        if tables_type['value'][item] != original_item:
            row += 1
            end_period = str(tables_type['_time'][item].tz_convert('Asia/Shanghai').strftime('%m-%d %H:%M:%S'))
            t = start_period + ' 至 ' + end_period
            workbook_tg3.write(row, 5, '110kV鱼珠主变电所', style_h3)
            Data4['first'].append('110kV鱼珠主变电所')
            # Mode codes: 0 = stopped, 1 = PF-priority, 2 = loss-priority,
            # anything else = off-hours compensation.
            if tables_type['value'][item-1] == 0.0:
                workbook_tg3.write(row, 6, '停止', style_h3)
                Data4['second'].append('停止')
            elif tables_type['value'][item - 1] == 1.0:
                workbook_tg3.write(row, 6, '模式1：功率因数优先', style_h3)
                Data4['second'].append('模式1：功率因数优先')
            elif tables_type['value'][item - 1] == 2.0:
                workbook_tg3.write(row, 6, '模式2：损耗优先', style_h3)
                Data4['second'].append('模式2：损耗优先')
            else:
                workbook_tg3.write(row, 6, '非运营时间补偿', style_h3)
                Data4['second'].append('非运营时间补偿')
            workbook_tg3.write(row, 7, t, style_h3)
            Data4['third'].append(t)
            start_period = str(tables_type['_time'][item].tz_convert('Asia/Shanghai').strftime('%Y-%m-%d %H:%M:%S'))
            original_item = tables_type['value'][item]
    # NOTE(review): the workbook is saved here unconditionally AND again in
    # the 'POST' branch below — the second save is redundant.
    savePath = os.path.join(target_file_path)
    workbook_new.save(savePath)

    if type == 'POST':
        savePath = os.path.join(target_file_path)
        workbook_new.save(savePath)
        return savePath
    else:

        return Data, Data2, Data3, Data4



from flask import Flask, jsonify, request, make_response, send_file
from flask_cors import CORS

# Flask application; CORS is enabled globally so the local front-end dev
# server (http://localhost:8080) can call this API from the browser.
app = Flask(__name__)
CORS(app, resources={r"/*": {"origins": ["http://localhost:8080"]}})

@app.route('/')
def hello_world():
    """Root endpoint: simple liveness message for the backend."""
    greeting = 'Hello, World! This is Flask backend.'
    return greeting


@app.route('/api/file', methods=['POST'])
def handle_data():
    """POST /api/file — generate the Excel report and return it as a download.

    Expects a JSON body {"starttime": ..., "endtime": ...}; both values are
    forwarded verbatim to main(), which builds the workbook and returns its
    file path.
    """
    data = request.json  # JSON body of the POST request

    starttime = data.get('starttime')
    endtime = data.get('endtime')
    print(starttime)
    print(endtime)

    # CORS is already handled globally by CORS(app); the response object
    # previously built here via make_response() was never returned (dead code)
    # and has been removed.
    file_path = main(starttime, endtime, 'POST')
    # Stream the generated workbook back as an attachment.
    return send_file(file_path, as_attachment=True)
@app.route('/api/data', methods=['POST'])
def GET_data():
    """POST /api/data — return the report tables as JSON instead of a file.

    Expects a JSON body {"starttime": ..., "endtime": ...}; returns the four
    row-buffer dicts produced by main(): daily rows, monthly rows, the
    operation log and the strategy log.
    """
    data = request.json  # JSON body of the POST request
    starttime = data.get('starttime')
    endtime = data.get('endtime')

    # CORS is already handled globally by CORS(app); the unused
    # make_response() object previously built here was dead code.
    # Unpack the 4-tuple returned by main() for the non-POST path.
    Data, Data2, Data3, Data4 = main(starttime, endtime, 'get')

    return jsonify({'message': 'Data received', 'data': Data, 'month': Data2, 'operation': Data3, 'strategy': Data4}), 200


if __name__ == '__main__':
    # Start the Flask development server (debug mode — not for production).
    # The commented-out npm/subprocess front-end launch code and its unused
    # `import subprocess` were removed as dead code.
    app.run(debug=True)