#!/usr/bin/env python
# encoding: utf-8
'''
@author: songyunlong
@contact: 1243049371@qq.com
@software: Pycharm
@file: datagenerate4
@time: 2019/8/6 下午4:50
'''
import pickle
from collections import Counter
import os
import numpy as np
import pandas as pd
import tensorflow as tf
from Training import TrainingModel
# Optimal-radius lookup table (group index -> optimal radius):
# {0:0.01, 1:0.05, 2:3.261155, 3:4.484088, 4:7.174541, 5:17.936353,
#  6:20.0, 7:23.915137, 8:35.872705, 9:71.745411, 10:107.618116,
#  11:143.490822, 12:179.363527, 13:215.236233, 14:251.108938}
def guiyi(dataset):
    '''
    Min-max normalize the feature columns of a labeled dataset.
    :param dataset: 2-D array whose last column is the label
    :return: array with each feature column scaled to [0, 1]; the label
             column is appended unchanged
    '''
    features = dataset[:, :-1]
    labels = dataset[:, -1][:, np.newaxis]
    feature_min = np.min(features, axis=0)
    feature_max = np.max(features, axis=0)
    # Guard against constant columns: a zero range would divide by zero
    # and fill the whole column with NaN; map such columns to 0 instead.
    span = feature_max - feature_min
    span = np.where(span == 0, 1, span)
    feature_guiyi = (features - feature_min) / span
    return np.hstack((feature_guiyi, labels))

def fft_transformer(dataset, N):
    '''
    Apply an N-point FFT to each row of a matrix and return the magnitudes.
    :param dataset: input matrix (rows are signals)
    :param N: number of FFT points (rows are padded/truncated to N)
    :return: matrix of FFT magnitude spectra, one row per input row
    '''
    spectrum = np.fft.fft(a=dataset, n=N, axis=1)
    return np.abs(spectrum)

def LoadFile(p):
    '''
    Load a pickled dataset from disk.
    :param p: absolute path of the dataset file
    :return: the unpickled data, or np.array([0]) if the file cannot be read
    '''
    data = np.array([0])
    try:
        with open(p, 'rb') as file:
            data = pickle.load(file)
    except (OSError, pickle.UnpicklingError):
        # The original used a bare `except:` plus `return` inside
        # `finally`, which silently swallowed *every* exception (even
        # KeyboardInterrupt) — catch only the expected failures.
        print('文件不存在!')
    return data

def SaveFile(data, savepickle_p):
    '''
    Persist prepared data to disk as a pickle file.
    :param data: data to store
    :param savepickle_p: absolute path of the target .pickle file
    :return: None
    '''
    # Never overwrite: if the target file already exists, do nothing.
    if os.path.exists(savepickle_p):
        return
    with open(savepickle_p, 'wb') as file:
        pickle.dump(data, file)
def data4gen(train_set, test_set):
    '''
    Train the data-generation model for group 4.
    :param train_set: training samples
    :param test_set: test samples
    :return: None
    '''
    model = TrainingModel(train_set, test_set)
    model.training()

if __name__ == '__main__':
    p = '/home/xiaosong/桌面/PNY_all.pickle'
    dataset = LoadFile(p=p)
    # Map each group index to its radius, smallest radius first.
    # (The original bound this to `dict`, shadowing the builtin.)
    radii = sorted(Counter(dataset[:, -1]).keys())
    radius_map = {idx: radius for idx, radius in enumerate(radii)}
    # print(radius_map)
    # Generate data for the 4th optimal radius: extract the group-4 rows.
    columns = ['f' + str(i) for i in range(1, dataset.shape[-1])] + ['r']
    frame = pd.DataFrame(data=dataset, columns=columns)
    data4 = frame.loc[frame['r'] == radius_map[4]].values
    # print(data4.shape) #(2521, 25)
    dataset_4feature = data4[:, :4]
    dataset_dense = data4[:, 4:-1]
    label = data4[:, -1][:, np.newaxis]
    dataset_fft = fft_transformer(dataset_dense, 100)
    dataset = np.hstack((dataset_4feature, dataset_fft, label))
    dataset_guiyi = guiyi(dataset)
    # print(dataset_guiyi.shape)
    # SaveFile(data=dataset_guiyi, savepickle_p='/home/xiaosong/桌面/pnydata/data4.pickle')
    # Train the data-generation model for group 4.
    # Bug fix: `rng` was created but never used, so the shuffle was
    # unseeded; shuffle through the seeded generator for reproducibility.
    rng = np.random.RandomState(0)
    rng.shuffle(dataset_guiyi)
    # NOTE(review): test_set drops the label column while train_set keeps
    # it — confirm TrainingModel really expects this asymmetry.
    data4gen(train_set=dataset_guiyi[:2000, :], test_set=dataset_guiyi[2000:, :-1])

