import datetime
import time

from fastapi import APIRouter, Depends, Request
from loguru import logger
from sqlalchemy import desc, and_, asc
from sqlalchemy.orm import Session

from model.downhole_report import GroundReport, FormBody, create_report
from db.mysql import get_db
# NOTE(review): star import below may shadow earlier names (it previously also
# supplied `time`); Error/Success come from here.
from vendor.extend.courier import *
from vendor.extend.conversion import is_positive_integer, calculate_time_difference
from model.downhole_mine import GroundMine
from model.downmine_data import DownmineData
from model.downhole_colliery import GroundColliery
from model.downhole_sensor import GroundSensor
from vendor.library.strain3d.stress_three import complete_stress_analysis


V3Analysis = APIRouter()


def _format_cn_datetime(unix_timestamp):
    """Format a unix timestamp (seconds, local time) as '%Y年%m月%d日%H时%M分'."""
    return datetime.datetime.fromtimestamp(unix_timestamp).strftime('%Y年%m月%d日%H时%M分')


def _channel_summary(value):
    """Build the Chinese report sentences for one sensor channel.

    ``value`` is a list of rows ``[s1, s2, s3, unix_timestamp]`` where
    ``s1..s3`` are the three principal stresses (MPa) returned by
    ``complete_stress_analysis`` — s1 is treated as the maximum principal
    stress throughout. Returns the text fragment (no trailing newline).
    """
    txt = ''

    # Row whose maximum principal stress (column 0) is largest; the reported
    # value is the largest of that row's three stresses.
    row_with_max_value = max(value, key=lambda x: x[0])
    max_value = max(row_with_max_value[0], row_with_max_value[1], row_with_max_value[2])
    txt += f"最大主应力的最大值为{max_value} MPa (发生在 {_format_cn_datetime(row_with_max_value[3])})，"

    # Symmetric minimum.
    row_with_min_value = min(value, key=lambda row: row[0])
    min_value = min(row_with_min_value[0], row_with_min_value[1], row_with_min_value[2])
    txt += f"最大主应力的最小值为{min_value} MPa (发生在 {_format_cn_datetime(row_with_min_value[3])})，"

    # Mean of the maximum principal stress over the window.
    count = len(value)
    average = sum(row[0] for row in value) / count if count > 0 else 0
    txt += f"最大主应力的平均值为{average} MPa ，"

    # Row whose max principal stress is closest to that mean.
    differences = [abs(row[0] - average) for row in value]
    closest_row = value[differences.index(min(differences))]
    txt += f"最接近最大主应力平均值的应力值为{closest_row[0]}MPa(发生在 {_format_cn_datetime(closest_row[3])})；"

    max_diff = float('-inf')
    max_difference_row = None
    min_difference = float('inf')
    min_difference_row = None
    average_abs_diff = None
    if len(value) >= 2:
        # BUG FIX: the original wrote ``col2 = col1 = []`` which bound both
        # names to the SAME list, so both "columns" contained the interleaved
        # values and average_abs_diff was computed over merged data.
        col1 = []
        col2 = []
        for row in value:
            # Signed difference between the first and second principal stress.
            diff = row[0] - row[1]
            if diff > max_diff:
                max_diff = diff
                max_difference_row = row
            difference = abs(row[0] - row[1])
            if difference < min_difference:
                min_difference = difference
                min_difference_row = row
            col1.append(row[0])
            col2.append(row[1])

        # Average of the two columns' peak-to-peak ranges.
        abs_diff_col1 = abs(max(col1) - min(col1))
        abs_diff_col2 = abs(max(col2) - min(col2))
        average_abs_diff = (abs_diff_col1 + abs_diff_col2) / 2

    if max_difference_row is not None:
        txt += f"最大主应力与最小主应力的的最大差值为{max_diff}MPa(发生在 {_format_cn_datetime(max_difference_row[3])})；"

    if min_difference_row is not None:
        txt += f"最大主应力的最小差值为{min_difference}MPa(发生在 {_format_cn_datetime(min_difference_row[3])})；"

    # BUG FIX: was ``if average_abs_diff:`` which skipped a legitimate 0.0.
    if average_abs_diff is not None:
        # BUG FIX: message previously said "最小差值" for this quantity, which
        # duplicated the label of the sentence above; this is the 平均差值.
        txt += f"最大主应力的平均差值为{average_abs_diff}MPa ，"

        closest_diff = float('inf')
        closest_row = None
        for row in value:
            diff = abs(row[0] - average_abs_diff)
            if diff < closest_diff:
                closest_diff = diff
                closest_row = row
        if closest_row:
            # BUG FIX: the original read the timestamp from min_difference_row
            # here, reporting the wrong occurrence time for closest_row.
            txt += f"最接近最大主应力平均差值的应力值为{closest_row[0]}MPa(发生在 {_format_cn_datetime(closest_row[3])})。"
    return txt


@V3Analysis.post('/report')
def report(itme:FormBody,request:Request,db: Session = Depends(get_db)):
    """Generate an LLM-prompt stress report for one borehole ("mine") over a
    time window and persist it.

    Body fields used: ``start_time``/``end_time`` (unix seconds),
    ``mine_id`` (positive int). Returns ``Success`` with the report's HTTP
    path, or ``Error`` with a Chinese message on any validation/DB failure.
    """
    item_dict = itme.dict()
    current_timestamp = item_dict.get('end_time', 0)
    one_week_ago = item_dict.get('start_time', 0)
    if current_timestamp <= one_week_ago:
        return Error(msg='结束时间不能小于等于开始时间')

    minel_id = item_dict.get('mine_id', 0)
    if not is_positive_integer(minel_id):
        return Error(msg='钻孔id参数错误')

    minel = db.query(GroundMine).filter_by(id=minel_id).first()
    if not minel:
        return Error(msg='钻孔不存在')

    sensor = db.query(GroundSensor).filter_by(mine_id=minel_id).all()
    if not sensor:
        return Error(msg='传感器不存在')

    colliery = db.query(GroundColliery).filter_by(id=minel.colliery_id).first()
    if not colliery:
        return Error(msg='煤矿不存在')

    # Per-channel calibration data, keyed by channel number:
    # baseline wavelengths (1 temperature + 9 stress points),
    initial_wavelengths_obj = {}
    # temperature compensation coefficient,
    coefficient_obj = {}
    # 9x6 strain calibration matrix,
    stress_matrix_obj = {}
    # 3x3 coordinate transformation matrix.
    transformation_matrix_obj = {}
    # Number of wavelength points read from each data record.
    point_num = 10
    channel_arr = []
    # Channel number -> sensor metadata used when naming sensors in the report.
    transducer = {}
    for info in sensor:
        channel_num = info.chanel
        # Earliest record on this channel; needed to locate the baseline.
        first_data = db.query(DownmineData.create_time).filter_by(line=channel_num).order_by(asc("id")).first()
        if not first_data:
            continue

        # Baseline is the first record taken `wavelength` hours after the
        # channel's first record (create_time assumed unix seconds — TODO confirm).
        wavelength = int(info.wavelength)
        baseline = db.query(DownmineData.data).filter(
            and_(DownmineData.line == channel_num,
                 DownmineData.create_time > first_data.create_time + (wavelength * 3600))
        ).order_by(asc("id")).first()
        if not baseline:
            continue

        initial_wavelengths_obj[channel_num] = baseline.data[0:point_num]
        coefficient_obj[channel_num] = info.coefficient
        stress_matrix_obj[channel_num] = info.stress_matrix
        transformation_matrix_obj[channel_num] = info.transformation_matrix
        channel_arr.append(channel_num)
        transducer[channel_num] = {
            "id": info.id,
            "colliery_id": info.colliery_id,
            "mine_id": info.mine_id,
            "chanel": info.chanel,
            "sensor_number": info.sensor_number,
            "setup_time": info.setup_time,
            "sensor_status": info.sensor_status,
        }

    # All records in the requested window for the calibrated channels.
    data_list = db.query(DownmineData.line, DownmineData.data, DownmineData.create_time).filter(
        and_(DownmineData.create_time > one_week_ago,
             DownmineData.create_time < current_timestamp,
             DownmineData.line.in_(channel_arr))
    ).order_by(asc("id")).all()

    # Channel number -> list of [s1, s2, s3, timestamp] principal-stress rows.
    sensor_list = {}
    for row in data_list:
        final_data = complete_stress_analysis(
            measured_wavelengths=row.data[0:point_num],
            initial_wavelengths=initial_wavelengths_obj[row.line],
            KT=coefficient_obj[row.line],
            constitutive_matrix=stress_matrix_obj[row.line],
            adjustment_matrix=transformation_matrix_obj[row.line],
        )
        stress = final_data['主应力大小']
        sensor_list.setdefault(row.line, []).append([
            stress[0],
            stress[1],
            stress[2],
            row.create_time,
        ])

    # Assemble the Chinese prompt text.
    # NOTE(review): "5号钻孔" is hard-coded regardless of mine_id — confirm intent.
    txt = '我在煤矿布置了地面长时孔的应力监测，'
    txt += f"其中5号钻孔有{len(sensor_list)}个传感器，"
    txt += "分别为："
    sensor_arr = [transducer[channel]['sensor_number'] for channel in sensor_list]
    sensor_names = '、'.join(map(str, sensor_arr))
    txt += f"{sensor_names}传感器"
    txt += "。\n"
    # The enumeration is repeated on purpose: it is the subject of the next sentence.
    txt += f"{sensor_names}传感器"
    start_date = datetime.datetime.fromtimestamp(one_week_ago).strftime('%Y年%m月%d日')
    txt += f"应力监测时间范围涵盖 {start_date}"
    end_date = datetime.datetime.fromtimestamp(current_timestamp).strftime('%Y年%m月%d日')
    txt += f"至 {end_date}，"
    txt += f"总监测天数为 {calculate_time_difference(one_week_ago,current_timestamp,'days')} 天。"
    txt += "\n"
    for value in sensor_list.values():
        txt += _channel_summary(value)
        txt += "\n"
    txt += f"大模型请注意，以上是{colliery.identifier}工作面的钻孔应力监测情况，请进行深度推理分析。不需要提建议。"

    # Persist the prompt (local name renamed so it no longer shadows this function).
    report_row = GroundReport(prompt=txt, create_time=int(time.time()))
    try:
        db.add(report_row)
        db.flush()
        db.refresh(report_row)
        db.commit()
    except Exception as e:
        # Roll back the session and report the failure to the caller.
        db.rollback()
        logger.error("工作面信息 添加 出错:{}", e)
        return Error(msg='创建报告失败', data=str(e))

    ret_rep = create_report(report_row.id)
    if ret_rep:
        domain = str(request.base_url)
        return Success(msg='报告生成成功', data={
            'http_path': domain + ret_rep['http_path']
        })
    return Error(msg='报告生成失败')

