from pitchSrc.pitchHis.initData import initData
from pitchSrc.pitchHis.cleanProcess import cleanProcess
from pitchSrc.pitchHis.groupProcess import groupProcess
import shutil
import os
import numpy as np
from toolsSrc.status.status import Status
from common.config import pitchConfig as Config
import pandas as pd
import time

'''##########################################################################################
针对 history 文件夹中的数据运行 pitchHis 中的 initData，cleanProcess，groupProcess
'''
def getGroupDf():
    """
    Run initData/cleanProcess/groupProcess over the history folder and load
    the resulting group CSV.

    :return: DataFrame read from Config.hisGroupProcess, or an empty frame
             with the PitchAngle/WindSpeed columns when the history folder
             holds no files at all.
    """
    history_files = os.listdir(Config.historyDataRoot)  # names of collected history files
    if not history_files:
        # No history data yet — hand back an empty frame with the expected columns.
        return pd.DataFrame(columns=[Config.PitchAngle, Config.WindSpeed], index=None)
    initData()
    cleanProcess()
    groupProcess()
    return pd.read_csv(Config.hisGroupProcess)

'''
生成 pitchAngle, windSpeed 范围框
'''
def getPitchWindRectangle(df):
    """
    Build the [pitch_min, pitch_max, wind_min, wind_max] bounding box of the
    observed data.

    The observed box is shrunk slightly (0.1 deg pitch, 0.2 wind) so active
    exploration can actually finish, then clamped to a guaranteed minimum
    span: pitch covers at least [0, 5] and wind at least [3, 10].
    """
    # Shrink the observed box a little; otherwise exploration may never complete.
    p_lo = df[Config.PitchAngle].min() + 0.1
    p_hi = df[Config.PitchAngle].max() - 0.1
    w_lo = df[Config.WindSpeed].min() + 0.2
    w_hi = df[Config.WindSpeed].max() - 0.2
    # Enforce the minimum span. The conditional form is kept deliberately:
    # NaN (empty input) fails every comparison and falls through to the fixed
    # bound. TODO(review): these limits probably belong in config.
    p_lo = p_lo if p_lo < 0 else 0
    p_hi = p_hi if p_hi > 5 else 5
    w_lo = w_lo if w_lo < 3 else 3
    w_hi = w_hi if w_hi > 10 else 10
    return [p_lo, p_hi, w_lo, w_hi]

'''
生成缺失的 pitchAngle
'''
def getLackPitch(df_pitchWind):
    """
    Find the pitch-angle grid points that still need data.

    A pitch angle is "lacking" when it appears in the data with fewer
    wind-speed grid points than the full grid requires, or when it does not
    appear in the data at all.

    :param df_pitchWind: DataFrame holding Config.PitchAngle / Config.WindSpeed columns.
    :return: one-column DataFrame named Config.pitchPlcKey listing the lacking pitch angles.
    """
    # Bounding box of the pitchAngle/windSpeed grid.
    [pitch_min, pitch_max, wind_min, wind_max] = getPitchWindRectangle(df_pitchWind)

    # Full wind-speed grid size per pitch angle (e.g. 24 windSpeed grid points).
    windCount_full = round((wind_max - wind_min) / Config.scale_WindSpeed) + 1
    # Per-pitch counts: the Config.WindSpeed column becomes the number of
    # wind-speed points observed for that pitch angle.
    df_windCount = df_pitchWind.groupby([Config.PitchAngle], as_index=False)[
        Config.WindSpeed].count()
    # Pitch angles whose coverage is incomplete (an empty DataFrame with the
    # same columns when nothing is missing).
    df_lackPitch = df_windCount[df_windCount[Config.WindSpeed] < windCount_full]
    df_lackPitch = df_lackPitch.drop(Config.WindSpeed, axis=1)

    # Pitch angles the data already contains, as canonical strings:
    # '{:g}'.format() strips trailing zeros so grid values match reliably.
    # BUG FIX: compare against ALL observed pitch angles (df_windCount), not
    # only the incompletely covered ones (df_lackPitch) — the old code
    # re-added fully covered pitch angles as "missing", forcing needless
    # re-exploration of data that was already complete.
    pitchAngles = ['{:g}'.format(x) for x in df_windCount[Config.PitchAngle]]
    for pitch in np.linspace(pitch_min, pitch_max,
                             round((pitch_max - pitch_min) / Config.scale_PitchAngle + 1)):  # full pitchAngle grid
        # round() guards against float artifacts such as 1.0000000002.
        if '{:g}'.format(round(pitch, 4)) not in pitchAngles:
            df_lackPitch = pd.concat([df_lackPitch, pd.DataFrame({Config.PitchAngle: [pitch]})], ignore_index=True)

    # Rename the column to match the key the PLC upload expects, then return.
    df_lackPitch.columns = [Config.pitchPlcKey]
    return df_lackPitch

'''
1. 针对 history 文件夹中的数据生成 groupProcess.csv 并读取
2. 检查缺少的 pitchAngle 数据
3. 返回缺少的 pitchAngle 数据 df_lackPitch
'''
@Status.changeStatus(4,1)
def preparePitchExploreData():
    """
    1. Build groupProcess.csv from the history-folder data and load it.
    2. Detect the missing pitchAngle grid points.
    3. Return them as df_lackPitch.
    """
    # windSpeed/pitchAngle group grid built from the existing history data
    # via initData / cleanProcess / groupProcess.
    grouped = getGroupDf()
    # Pitch angles still lacking wind-speed coverage.
    missing = getLackPitch(grouped)
    # Fixed pause before returning — presumably settle time for upstream
    # consumers; TODO(review) confirm why 10 s.
    time.sleep(10)
    return missing

#
# if __name__ == '__main__':
#     preparePitchExploreData()
'''##########################################################################################
将 df_pitchExplore 中第 i 条数据写入 pitchUpload.csv（opcUa上传频率比较高，会马上上传）
'''
def pitchExploreUpOne(df_pitchExplore, i):
    """
    Write row i of df_pitchExplore (one pitch angle to explore actively) to
    pitchUpload.csv; opcUa polls that file at a high rate and uploads it
    promptly.
    """
    angle = df_pitchExplore.iloc[i, 0]  # the i-th pitch angle to explore
    upload_frame = pd.DataFrame({Config.pitchPlcKey: [angle]})
    upload_frame.to_csv(Config.pitchUpload)

'''
根据时间戳，在数据池 pool 中取一个新生成的数据文件，拷贝到historyDataRoot文件夹中
'''
def pitchExploreDownOne():
    """
    Block until a data file newer than this call's start time appears in the
    pool directory, then copy it into the history folder.

    Polls once per second; the poll interval must not be shorter than the
    time the pool needs to produce one file (TODO: move into config?).
    """
    startTime = time.time()
    # makedirs(exist_ok=True) also covers a missing parent, unlike os.mkdir.
    os.makedirs(Config.historyDataRoot, exist_ok=True)
    while True:
        files = os.listdir(Config.poolRoot)
        if files:  # guard: np.argmax raises ValueError on an empty pool
            times = [os.path.getmtime(os.path.join(Config.poolRoot, fn)) for fn in files]
            maxIndex = int(np.argmax(times))
            if times[maxIndex] > startTime:
                # BUG FIX: the original concatenated poolRoot and the file
                # name with no separator (while getmtime above used '/');
                # os.path.join builds the path correctly either way.
                shutil.copy(os.path.join(Config.poolRoot, files[maxIndex]), Config.historyDataRoot)
                break
        time.sleep(1)

'''
1. 分批次上传 df_pitchExplore 中的 pitch 数据
2. 每上传一个 pitch，采集一个数据文件，保存到history中
'''
def pitchExploreUpDown(df_pitchExplore):
    """
    Upload the pitch values in df_pitchExplore one at a time; after every
    upload, wait for and collect one freshly generated data file into the
    history folder.
    """
    for row_index in range(len(df_pitchExplore)):
        pitchExploreUpOne(df_pitchExplore, row_index)  # upload record row_index
        pitchExploreDownOne()                          # collect one data file

'''
1. 准备主动探索数据 df_lackPitch
2. 主动探索（多次，直到数据全部采集全，或者达到次数上限）
'''
@Status.changeDBStatus('pitch',4,1)
@Status.changeStatus(4,1)
def pitchExplore():
    """
    1. Prepare the active-exploration targets (df_lackPitch).
    2. Explore repeatedly until the data is complete or the configured
       round limit (Config.maxExploreLoop) is reached.
    """
    df_lackPitch = preparePitchExploreData()
    for _ in range(Config.maxExploreLoop):  # cap on exploration rounds
        if len(df_lackPitch) == 0:          # nothing left to collect
            break
        pitchExploreUpDown(df_lackPitch)
        df_lackPitch = preparePitchExploreData()