import torch
import torch.nn as nn
import matplotlib.pyplot as plt
import os
from torch.nn import functional as F
import numpy as np
import random
os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE"

def seed_it(seed):
    """Seed every RNG used by this script (random, numpy, torch CPU/CUDA).

    Also configures cuDNN so repeated runs produce identical results.

    Args:
        seed: integer seed applied to all random number generators.
    """
    random.seed(seed)
    # BUGFIX: the hash-randomization variable is PYTHONHASHSEED — the
    # original set "PYTHONSEED", which Python ignores. Note that setting it
    # at runtime only affects child interpreters, not the current process.
    os.environ["PYTHONHASHSEED"] = str(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)           # CPU RNG
    torch.cuda.manual_seed(seed)      # current GPU (no-op without CUDA)
    torch.cuda.manual_seed_all(seed)  # all GPUs (no-op without CUDA)
    # Deterministic algorithm selection. BUGFIX: benchmark must be False for
    # reproducibility — the original set True, which lets cuDNN auto-tune and
    # defeats the deterministic flag (at some performance cost, as the
    # original comment itself noted).
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.enabled = True  # default True; kept for clarity




class Max_Min_Model(nn.Module):

    def __init__(self):
        super(Max_Min_Model,self).__init__()
        # self.fc1 = nn.Linear(8, 8)
        # self.fc2 = nn.Linear(8, 8)
        # self.fc3 = nn.Linear(8, 8)
        self.fc4 = nn.Linear(8, 4)

    def forward(self,wd_f):  # wd: 1x1, f: 4*1
        # out1=F.relu(self.fc1(wd_f))
        # out2 = F.relu(self.fc2(out1))
        # out3 = F.relu(self.fc3(out2))
        out = F.relu(self.fc4(wd_f))
        return out

if __name__ == '__main__':
    # Reproducibility: fix all RNG seeds before the model's weights are created.
    seed_it(1)
    net = Max_Min_Model()
    # Hyper-parameters: full-batch SGD with an MSE regression loss.
    epochs =500
    lr = 0.01
    optimizer = torch.optim.SGD(net.parameters(), lr=lr)
    loss_func = nn.MSELoss()
    # labels=label_dataset
    # Data: 100 hard-coded samples.
    # wb_dataset: each row is one scalar in [1, 7] repeated 4 times (100 x 4).
    # NOTE(review): requires_grad=True on input/feature tensors is unnecessary
    # for training the network's weights — confirm it is intentional.
    wb_dataset = torch.tensor(
        [[4.0, 4.0, 4.0, 4.0], [2.0, 2.0, 2.0, 2.0], [3.0, 3.0, 3.0, 3.0], [2.0, 2.0, 2.0, 2.0], [2.0, 2.0, 2.0, 2.0], [1.0, 1.0, 1.0, 1.0], [7.0, 7.0, 7.0, 7.0], [7.0, 7.0, 7.0, 7.0], [6.0, 6.0, 6.0, 6.0], [1.0, 1.0, 1.0, 1.0], [2.0, 2.0, 2.0, 2.0], [6.0, 6.0, 6.0, 6.0], [7.0, 7.0, 7.0, 7.0], [4.0, 4.0, 4.0, 4.0], [5.0, 5.0, 5.0, 5.0], [3.0, 3.0, 3.0, 3.0], [5.0, 5.0, 5.0, 5.0], [4.0, 4.0, 4.0, 4.0], [6.0, 6.0, 6.0, 6.0], [6.0, 6.0, 6.0, 6.0], [6.0, 6.0, 6.0, 6.0], [7.0, 7.0, 7.0, 7.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [7.0, 7.0, 7.0, 7.0], [2.0, 2.0, 2.0, 2.0], [1.0, 1.0, 1.0, 1.0], [2.0, 2.0, 2.0, 2.0], [6.0, 6.0, 6.0, 6.0], [7.0, 7.0, 7.0, 7.0], [6.0, 6.0, 6.0, 6.0], [1.0, 1.0, 1.0, 1.0], [4.0, 4.0, 4.0, 4.0], [7.0, 7.0, 7.0, 7.0], [6.0, 6.0, 6.0, 6.0], [7.0, 7.0, 7.0, 7.0], [7.0, 7.0, 7.0, 7.0], [4.0, 4.0, 4.0, 4.0], [3.0, 3.0, 3.0, 3.0], [6.0, 6.0, 6.0, 6.0], [7.0, 7.0, 7.0, 7.0], [1.0, 1.0, 1.0, 1.0], [2.0, 2.0, 2.0, 2.0], [2.0, 2.0, 2.0, 2.0], [7.0, 7.0, 7.0, 7.0], [7.0, 7.0, 7.0, 7.0], [7.0, 7.0, 7.0, 7.0], [2.0, 2.0, 2.0, 2.0], [5.0, 5.0, 5.0, 5.0], [2.0, 2.0, 2.0, 2.0], [7.0, 7.0, 7.0, 7.0], [7.0, 7.0, 7.0, 7.0], [4.0, 4.0, 4.0, 4.0], [4.0, 4.0, 4.0, 4.0], [4.0, 4.0, 4.0, 4.0], [1.0, 1.0, 1.0, 1.0], [2.0, 2.0, 2.0, 2.0], [7.0, 7.0, 7.0, 7.0], [7.0, 7.0, 7.0, 7.0], [1.0, 1.0, 1.0, 1.0], [3.0, 3.0, 3.0, 3.0], [7.0, 7.0, 7.0, 7.0], [5.0, 5.0, 5.0, 5.0], [4.0, 4.0, 4.0, 4.0], [5.0, 5.0, 5.0, 5.0], [1.0, 1.0, 1.0, 1.0], [5.0, 5.0, 5.0, 5.0], [4.0, 4.0, 4.0, 4.0], [3.0, 3.0, 3.0, 3.0], [1.0, 1.0, 1.0, 1.0], [7.0, 7.0, 7.0, 7.0], [6.0, 6.0, 6.0, 6.0], [6.0, 6.0, 6.0, 6.0], [1.0, 1.0, 1.0, 1.0], [5.0, 5.0, 5.0, 5.0], [7.0, 7.0, 7.0, 7.0], [3.0, 3.0, 3.0, 3.0], [3.0, 3.0, 3.0, 3.0], [5.0, 5.0, 5.0, 5.0], [5.0, 5.0, 5.0, 5.0], [7.0, 7.0, 7.0, 7.0], [5.0, 5.0, 5.0, 5.0], [3.0, 3.0, 3.0, 3.0], [5.0, 5.0, 5.0, 5.0], [6.0, 6.0, 6.0, 6.0], [5.0, 5.0, 5.0, 5.0], [7.0, 7.0, 7.0, 7.0], [4.0, 4.0, 4.0, 4.0], [6.0, 6.0, 6.0, 6.0], [3.0, 3.0, 3.0, 3.0], [1.0, 1.0, 
1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [2.0, 2.0, 2.0, 2.0], [6.0, 6.0, 6.0, 6.0], [3.0, 3.0, 3.0, 3.0], [3.0, 3.0, 3.0, 3.0], [7.0, 7.0, 7.0, 7.0], [3.0, 3.0, 3.0, 3.0], [3.0, 3.0, 3.0, 3.0], [1.0, 1.0, 1.0, 1.0]]
, requires_grad=True)
    # f_dataset: 4 ascending values per row (100 x 4).
    f_dataset = torch.tensor([[0.7976588013629393, 1.4869478810662917, 4.864237109177295, 5.248194712527557], [1.5196755452985766, 4.81618339092786, 5.68041603469969, 6.972432264104814], [0.8484447065595724, 2.0737206128665155, 3.802997491142306, 9.834307573985495], [0.9395743821247704, 2.0450583071444948, 3.4311702611084116, 6.238366151308822], [0.4310283547062421, 3.087231411615253, 4.532879011733951, 5.114640011711632], [2.6050985504600255, 2.6383467943157903, 8.343780560593231, 8.603559288264716], [2.235697300226398, 3.9223148722307712, 8.26537736652263, 8.431275085526293], [5.563886001342338, 8.102053037488409, 9.801570433365224, 9.967805539084857], [0.316180084185812, 4.718584307367175, 7.6399618724697085, 8.75209117319505], [3.64932023193533, 4.402471561273709, 6.313493834250165, 6.975972641112725], [1.0633552523012224, 2.1082284202931367, 4.631652630394089, 5.803974613258234], [1.5627822315763262, 5.424498162240054, 5.940168653191515, 8.417375248684506], [3.7648531690077416, 4.393507332197335, 4.90612300059318, 6.586811089759344], [0.5661925691073955, 4.673104249978122, 6.770957028591081, 9.515777234180726], [2.1621826119727583, 2.199458004609698, 3.615585705222127, 8.814945392221201], [0.2935460938512091, 2.7824735324322702, 3.9552962304334507, 7.152617055452202], [5.204883360053048, 6.086902184390036, 9.110777449278556, 9.790688558961694], [2.7707706899938356, 4.5283399034258975, 4.945011826152287, 5.57430952507041], [1.3737803638190826, 4.18540029110546, 7.947629228828382, 9.170382253393122], [0.6178800212848878, 3.807494232052433, 5.167815795644025, 7.3180971875867815], [1.1294738487592415, 4.431002726485626, 5.449069671481839, 8.866153859698027], [1.0980398633114874, 3.2814302209823754, 3.8134971679840026, 5.76882826229263], [4.083720349966731, 5.168831970457578, 6.055172085815103, 6.908640019499535], [0.8148500811403347, 1.2386834413899361, 4.284668589492089, 8.008771703536665], [5.292989695601404, 5.330406304879473, 6.701812514230914, 
9.78237863186052], [2.0508009668472327, 2.930517161487848, 5.272545025720491, 9.84019930107756], [1.5490397174677795, 3.549851333775844, 6.338468256857394, 6.420212611776143], [0.3928510916690897, 1.0495679106386435, 5.920917419044666, 9.856593383181128], [3.460643031456394, 7.472268982537029, 7.678221783169276, 7.910025074454107], [0.5384043331759403, 3.8857888745283917, 4.7972157519603815, 6.994521291573744], [2.1152581297030726, 2.3349939950689014, 3.3356862150643476, 5.301237988902175], [4.758346650510953, 6.282956534082128, 7.424222282650867, 8.338646990659944], [0.04786114358057314, 2.816157615749273, 3.856163596259783, 5.475703800368894], [0.7500508970171471, 2.179953809461277, 8.459755233867872, 8.67653537416], [0.3118086824270072, 2.2742371523337312, 3.6140733496009303, 8.41092822817596], [0.6186300233913222, 5.406605325007865, 5.695708529037315, 7.691499022373986], [2.887304541622666, 3.259841249953489, 7.505103938927756, 9.968933258508628], [2.251437415602923, 3.1834944373405403, 8.785254247743811, 9.587500212517954], [0.805847924121571, 4.89613001630849, 7.532305778255735, 9.705387903814865], [0.20554145129274892, 4.959241082584736, 7.311802721906604, 8.470028196411487], [3.6313090052845363, 6.134900568930028, 7.006002975698257, 9.233806129676816], [2.3633163333043083, 7.150378995573612, 7.882595873846093, 9.660071055887586], [0.28297266603059645, 3.2125784821001124, 3.892601861689451, 5.763059343778581], [0.4980602353662744, 2.891001251726647, 4.16496950396213, 8.327775378326276], [0.30242762218950725, 4.190287257362398, 6.003541051249463, 8.63938270429794], [0.1120923541085117, 0.9973383433795813, 2.0749001543762158, 4.9525716484505775], [1.2796269145349048, 5.384141133859517, 7.099306049417025, 8.718806044250472], [1.9538034460751452, 2.3670101877684058, 5.471021682595676, 6.71307648005595], [0.49182729441134554, 1.2677642758216479, 3.278570442391376, 7.749014226216819], [3.2692905993138357, 4.76815932380842, 4.887312183292417, 7.951915199750033], 
[1.260316825890313, 1.35716992592115, 6.074298064999242, 6.908867228995722], [1.2298831934284693, 2.5475118239600247, 5.810602331126341, 8.982576117780715], [1.4727537736357743, 3.7714345867777443, 4.538703711281308, 9.380150697825892], [0.06460040024803715, 3.7316613371427296, 6.69953904432666, 8.73867905577653], [1.1756079415074872, 1.4740280870716371, 3.244865252494983, 9.306931617361446], [0.042394859081555936, 2.5097904461330547, 5.555581733726753, 5.810241872266891], [0.4580920394625998, 2.4842197934824006, 7.971508447597881, 9.111475502577324], [0.12915182189052743, 3.855137018584469, 4.01332654043809, 8.105074866505621], [4.654355274652442, 7.757186919992804, 8.939307448824813, 9.197756775733128], [2.1892921994268786, 3.483594991458794, 3.7392099906212586, 5.04604482738217], [1.5121928492509507, 1.617402924605451, 8.025579741517696, 8.14800774063896], [2.26375018911369, 3.2859469443475477, 6.739545757819556, 8.72715033385814], [0.8546193658227208, 1.5301248232513986, 2.011052837172084, 5.393122262742594], [0.5350412435028606, 0.5362670225008637, 4.051479532876013, 4.939636410631664], [2.0241214026080065, 4.041783050938754, 9.284905677324774, 9.44439211612432], [2.7376461798648166, 7.457651955285664, 8.403364173973992, 8.99716104467647], [1.3484575995225057, 2.506382324849344, 4.222946829169475, 5.659227462419426], [0.033299146133356494, 5.474385778055861, 8.426879448425671, 9.971606444129254], [0.5771611250758157, 0.9529061377480197, 4.132810734535934, 8.433496534746078], [1.3129610024196214, 1.3958063798507991, 1.9336911601338247, 3.307726780879874], [0.43785693758642164, 2.5084372505166455, 4.086794916702045, 7.554221936411118], [0.5330815365189834, 9.010302353597632, 9.56111547243679, 9.661197994487434], [0.1903105079023759, 2.786818469816766, 5.832719446805047, 6.761602614369279], [1.919680254790792, 2.917001185271353, 5.349321215902454, 9.492184154593787], [1.0625898827035574, 3.2684874538623343, 7.766869089419033, 9.008118599293411], 
[0.09371806040764485, 4.459000395367802, 7.977820514363608, 9.800999436505283], [0.4769435276016343, 0.7088253954732537, 7.739808960717461, 8.192318733937139], [0.19732656741980215, 0.4615697274338926, 1.1144361446975326, 1.7442762046133042], [1.0173340457710622, 2.361094292969793, 2.803186166580348, 9.168945523234077], [4.963585663867606, 7.516234957907653, 8.957853850230897, 8.986624062675146], [1.038830407434892, 3.569502730085612, 7.8758971949538035, 8.411000715390038], [1.8069074042895283, 3.218014306561666, 6.931376174467962, 7.473026750307018], [2.306345860598289, 6.841029501048764, 7.1818027639826, 7.453825931513825], [4.087259310919289, 4.421511727474402, 6.825109433059611, 7.316043227491759], [1.0140454019784384, 5.990253434581801, 6.804485521465232, 9.508348547988799], [0.31386779192979497, 2.99477662715948, 7.701958258182307, 9.878113618462686], [4.448014833010076, 6.277297973687079, 6.754276026220029, 9.342490715160672], [2.54025804019661, 2.959898208571461, 4.067343572127138, 7.664794833875092], [1.2347829235934749, 6.294223473228073, 6.396474178361138, 8.005626936777366], [0.19558567937207783, 5.477702917682647, 5.499460988145661, 7.7881604677200835], [2.4080063028988477, 5.307861334977478, 7.032500081242695, 9.656911173437596], [3.6221828120366952, 3.822398025289174, 6.347418722178613, 7.315309752566563], [2.090876711877417, 3.5588250635123284, 4.602153308373262, 8.62450259480929], [2.633803427210121, 5.565066923616211, 6.349235490983486, 8.902127441332759], [1.2971896348216283, 3.5857916942843913, 4.893206663693483, 7.063350242567119], [0.9828870518510746, 2.268246103899597, 7.759354509050689, 7.765624707816229], [1.5617097484550635, 1.5898104286885673, 1.8428793241953645, 4.773883351922316], [1.9250984723850018, 4.005554279160411, 9.305445663212533, 9.381773046969206], [2.1972475843065586, 2.6273825614676816, 3.158488259693751, 4.144985107087233], [0.20805564426891854, 2.4307534561938207, 5.717099087881337, 9.475792951266481]]
,
                             requires_grad=True)
    # label_dataset: regression targets, 4 non-decreasing values per row
    # (100 x 4); first entry of each row is a multiple of 0.25.
    label_dataset = torch.tensor(
        [[1.0, 1.0674470662123536, 1.0674470662123536, 1.0674470662123536], [0.5, 0.5, 0.5, 0.5], [0.75, 0.75, 0.75, 0.75], [0.5, 0.5, 0.5, 0.5], [0.5, 0.5229905484312526, 0.5229905484312526, 0.5229905484312526], [0.25, 0.25, 0.25, 0.25], [1.75, 1.75, 1.75, 1.75], [1.75, 1.75, 1.75, 1.75], [1.5, 1.8946066386047293, 1.8946066386047293, 1.8946066386047293], [0.25, 0.25, 0.25, 0.25], [0.5, 0.5, 0.5, 0.5], [1.5, 1.5, 1.5, 1.5], [1.75, 1.75, 1.75, 1.75], [1.0, 1.1446024769642016, 1.1446024769642016, 1.1446024769642016], [1.25, 1.25, 1.25, 1.25], [0.75, 0.9021513020495969, 0.9021513020495969, 0.9021513020495969], [1.25, 1.25, 1.25, 1.25], [1.0, 1.0, 1.0, 1.0], [1.5, 1.5420732120603058, 1.5420732120603058, 1.5420732120603058], [1.5, 1.7940399929050375, 1.7940399929050375, 1.7940399929050375], [1.5, 1.623508717080253, 1.623508717080253, 1.623508717080253], [1.75, 1.9673200455628375, 1.9673200455628375, 1.9673200455628375], [0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25], [1.75, 1.75, 1.75, 1.75], [0.5, 0.5, 0.5, 0.5], [0.25, 0.25, 0.25, 0.25], [0.5, 0.5357163027769701, 0.5357163027769701, 0.5357163027769701], [1.5, 1.5, 1.5, 1.5], [1.75, 2.1538652222746864, 2.1538652222746864, 2.1538652222746864], [1.5, 1.5, 1.5, 1.5], [0.25, 0.25, 0.25, 0.25], [1.0, 1.3173796188064757, 1.3173796188064757, 1.3173796188064757], [1.75, 2.083316367660951, 2.083316367660951, 2.083316367660951], [1.5, 1.896063772524331, 1.896063772524331, 1.896063772524331], [1.75, 2.127123325536226, 2.127123325536226, 2.127123325536226], [1.75, 1.75, 1.75, 1.75], [1.0, 1.0, 1.0, 1.0], [0.75, 0.75, 0.75, 0.75], [1.5, 1.931486182902417, 1.931486182902417, 1.931486182902417], [1.75, 1.75, 1.75, 1.75], [0.25, 0.25, 0.25, 0.25], [0.5, 0.5723424446564679, 0.5723424446564679, 0.5723424446564679], [0.5, 0.5006465882112419, 0.5006465882112419, 0.5006465882112419], [1.75, 2.232524125936831, 2.232524125936831, 2.232524125936831], [1.75, 2.2959692152971627, 2.945284651255953, 3.8156691481356906], [1.75, 
1.9067910284883651, 1.9067910284883651, 1.9067910284883651], [0.5, 0.5, 0.5, 0.5], [1.25, 1.5027242351962182, 1.6202042148835034, 1.6202042148835034], [0.5, 0.5, 0.5, 0.5], [1.75, 1.913227724703229, 2.1912566240942684, 2.1912566240942684], [1.75, 1.9233722688571768, 1.9233722688571768, 1.9233722688571768], [1.0, 1.0, 1.0, 1.0], [1.0, 1.3117998665839876, 1.3117998665839876, 1.3117998665839876], [1.0, 1.0, 1.0, 1.0], [0.25, 0.31920171363948135, 0.31920171363948135, 0.31920171363948135], [0.5, 0.5139693201791334, 0.5139693201791334, 0.5139693201791334], [1.75, 2.290282726036491, 2.290282726036491, 2.290282726036491], [1.75, 1.75, 1.75, 1.75], [0.25, 0.25, 0.25, 0.25], [0.75, 0.75, 0.75, 0.75], [1.75, 1.75, 1.75, 1.75], [1.25, 1.3817935447257597, 1.3817935447257597, 1.3817935447257597], [1.0, 1.154986252165713, 1.4643458669981377, 1.4643458669981377], [1.25, 1.25, 1.25, 1.25], [0.25, 0.25, 0.25, 0.25], [1.25, 1.25, 1.25, 1.25], [1.0, 1.3222336179555478, 1.3222336179555478, 1.3222336179555478], [0.75, 0.8076129583080615, 0.8076129583080615, 0.8076129583080615], [0.25, 0.25, 0.25, 0.25], [1.75, 2.1873810208045263, 2.1873810208045263, 2.1873810208045263], [1.5, 1.8223061544936723, 1.8223061544936723, 1.8223061544936723], [1.5, 1.9365631640325414, 1.9365631640325414, 1.9365631640325414], [0.25, 0.25, 0.25, 0.25], [1.25, 1.3124700390988142, 1.3124700390988142, 1.3124700390988142], [1.75, 2.3020939798641185, 2.3020939798641185, 2.3020939798641185], [0.75, 0.8410188241327886, 0.907115538462556, 0.907115538462556], [0.75, 0.9342244775267327, 1.1705518525731526, 1.2266675604487727], [1.25, 1.3275553180763127, 1.3275553180763127, 1.3275553180763127], [1.25, 1.25, 1.25, 1.25], [1.75, 1.987056530855036, 1.987056530855036, 1.987056530855036], [1.25, 1.25, 1.25, 1.25], [0.75, 0.75, 0.75, 0.75], [1.25, 1.25, 1.25, 1.25], [1.5, 1.6619848660071872, 1.6619848660071872, 1.6619848660071872], [1.25, 1.562044069356735, 1.562044069356735, 1.562044069356735], [1.75, 1.75, 1.75, 1.75], [1.0, 
1.0, 1.0, 1.0], [1.5, 1.5884056921355083, 1.5884056921355083, 1.5884056921355083], [0.75, 0.9348047735426407, 0.9348047735426407, 0.9348047735426407], [0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25], [0.5, 0.5, 0.5, 0.5], [1.5, 1.5, 1.5, 1.5], [0.75, 0.75, 0.75, 0.75], [0.75, 0.75, 0.75, 0.75], [1.75, 1.8127634171816456, 1.9242399114281847, 2.005600498661005], [0.75, 0.75, 0.75, 0.75], [0.75, 0.75, 0.75, 0.75], [0.25, 0.2639814519103605, 0.2639814519103605, 0.2639814519103605]]
    )
    # Each training sample is the 4 wb values concatenated with the 4 f
    # values -> one 8-dim feature vector per row.
    wb_f_dataset = torch.cat((wb_dataset, f_dataset), dim=1)

    # 70/30 train / evaluation split of the 100 samples.
    wb_f_train = wb_f_dataset[0:70, :]
    label_train = label_dataset[0:70, :]

    wb_f_evl = wb_f_dataset[70:100, :]
    label_evl = label_dataset[70:100, :]

    # Bookkeeping for the loss curve plotted below.
    epoch_list = []
    loss_list = []

    # --- training: full-batch gradient descent ---
    # (dead manual `epoch` counter removed; the for-loop drives it.)
    for epoch in range(epochs):
        optimizer.zero_grad()
        output = net(wb_f_train)
        loss = loss_func(output, label_train)
        epoch_list.append(epoch)
        loss_list.append(loss.item())
        loss.backward()
        optimizer.step()
        # epoch is 0-based; print 1-based, matching the original output.
        print('epoch {}, loss {}'.format(epoch + 1, loss.item()))

    # Plot the training-loss curve.
    plt.plot(epoch_list, loss_list, linewidth=4)
    plt.title("max-min-train", fontsize=14)
    plt.xlabel("epoch", fontsize=14)
    plt.ylabel("loss", fontsize=14)
    plt.show()

    # --- evaluation on the held-out 30 samples ---
    with torch.no_grad():  # no gradients needed at evaluation time
        outputs = net(wb_f_evl)  # predictions
        evl_mse = torch.nn.functional.mse_loss(
            outputs,    # predictions
            label_evl,  # targets
        )
        print('{}:{}'.format('MSE', evl_mse))
        # Per-sample root of the summed squared relative errors, averaged
        # over the evaluation set. BUGFIX: the original computed this and
        # then immediately overwrote it with a value derived from evl_mse,
        # so the printed "MAPE" was wrong. The hard-coded 30 is replaced by
        # the actual evaluation-set size.
        evl_mape = torch.sqrt((((outputs - label_evl) / label_evl) ** 2).sum(dim=1))
        evl_mape = evl_mape.sum(dim=0) / label_evl.shape[0]
        print('{}:{}'.format('MAPE', evl_mape))