# Core numerics / NN stack, plus matplotlib for the loss curve.
import torch
import torch.nn as nn
import matplotlib.pyplot as plt
import os
from torch.nn import functional as F
import numpy as np
import random
# Works around the "duplicate OpenMP runtime" abort that torch + matplotlib
# can trigger (common on Windows conda installs).
os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE"

def seed_it(seed):
    """Seed every RNG in use (python, numpy, torch CPU/CUDA) for reproducibility.

    Note: PYTHONHASHSEED only controls hash randomization if it is set before
    the interpreter starts; it is exported here mainly for child processes.
    """
    random.seed(seed)
    # Bug fix: the original set "PYTHONSEED", which Python never reads.
    os.environ["PYTHONHASHSEED"] = str(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)        # no-op on CPU-only installs
    torch.cuda.manual_seed_all(seed)
    # Deterministic cuDNN: pin the algorithm choice.  benchmark must be False,
    # otherwise cuDNN auto-tunes per input size and results can vary run to run
    # (the original set True, contradicting deterministic=True above).
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.enabled = True  # default; kept explicit for clarity
# random seed 27 -> the quadratic-function dataset
# random seed 23 -> the max-min dataset
# Use the max-min dataset seed (23) so the run matches the recorded data.
seed_it(23)
# input
# wb_dataset = torch.tensor([[10.0],[20.0]],requires_grad=True)
# f_dataset = torch.tensor([[2,2.6,4,5],[4,5.2,8,10]],requires_grad=True)
# label_dataset = torch.tensor([[2,2.6,2.7,2.7],[4,5.2,5.4,5.4]])

# 100 samples: one scalar input feature per sample (presumably the total
# resource/weight to allocate -- TODO confirm with the dataset generator).
# Bug fix: requires_grad=True removed -- this is plain input data; only
# net.parameters() are optimized and input gradients are never read, so the
# flag just wasted autograd bookkeeping.
wb_dataset = torch.tensor([
    [16.0], [4.0], [1.0], [24.0], [6.0], [3.0], [11.0], [19.0], [21.0], [22.0],
    [23.0], [5.0], [30.0], [8.0], [16.0], [26.0], [2.0], [4.0], [30.0], [4.0],
    [15.0], [18.0], [13.0], [9.0], [3.0], [25.0], [19.0], [3.0], [28.0], [1.0],
    [18.0], [30.0], [25.0], [10.0], [17.0], [26.0], [18.0], [20.0], [16.0], [6.0],
    [25.0], [25.0], [30.0], [30.0], [20.0], [25.0], [8.0], [4.0], [1.0], [16.0],
    [15.0], [26.0], [10.0], [25.0], [27.0], [27.0], [7.0], [13.0], [30.0], [12.0],
    [2.0], [8.0], [14.0], [21.0], [21.0], [14.0], [17.0], [14.0], [7.0], [14.0],
    [8.0], [8.0], [24.0], [3.0], [27.0], [15.0], [27.0], [25.0], [20.0], [1.0],
    [16.0], [5.0], [4.0], [23.0], [29.0], [24.0], [24.0], [29.0], [23.0], [30.0],
    [15.0], [6.0], [5.0], [5.0], [28.0], [23.0], [15.0], [30.0], [1.0], [16.0],
])
# 100 samples x 4 flows: per-flow demand features, apparently sorted ascending
# within each row -- TODO confirm with the dataset generator.
# Bug fix: requires_grad=True removed (plain input data, see wb_dataset note).
f_dataset = torch.tensor([
    [0.5126196341334943, 4.8480956998790115, 6.777484475929885, 9.650486732031535],
    [1.5328178262827952, 3.686465218649173, 6.839008675476802, 7.123815173373523],
    [0.2590743073758872, 1.7752967495104277, 3.8588274929744615, 9.248738656380851],
    [0.12475558024847122, 0.9362668264133511, 1.2987621379001313, 9.461537960464817],
    [1.003253262993412, 2.8602202021975573, 4.350828830774159, 5.131303671749814],
    [7.716849795983737, 7.751816661883408, 8.976171055635419, 9.01686510828537],
    [0.32012880709457847, 0.33462378206023824, 1.7521792201803288, 7.625380482133051],
    [5.034257388367422, 6.995139808109105, 7.894793002544466, 8.542667305328141],
    [0.4982972784166517, 1.8446340861923571, 3.7438868630483313, 7.007404404911345],
    [2.5659462156193213, 7.79774891670843, 9.436322393764025, 9.755576535290523],
    [4.685367214116971, 6.717702332041032, 7.3264556178422025, 8.762963280924156],
    [1.380635781940912, 2.760357985677524, 6.4195070028503185, 9.798463162199631],
    [5.683826036292161, 7.398913268947891, 8.380782870667549, 9.484580472770023],
    [0.7408010099814399, 2.132853752880691, 3.799980767290757, 7.161043213809077],
    [4.118562358242304, 4.26711494842769, 6.569771391364439, 8.00350389604008],
    [0.8434310606751283, 2.1405062207685055, 4.070116168177238, 6.358854092479645],
    [4.893016195368977, 5.012723660840535, 5.300471917277388, 5.866527807474348],
    [0.47963113113071887, 1.1321163135888823, 4.470608624896027, 6.896611901195087],
    [2.5691659142020873, 3.880161701479251, 7.137790743606369, 7.4213727698591025],
    [1.0832505630930311, 8.311829112467361, 8.548965014971207, 8.965593315239008],
    [0.5753780250584506, 1.1601432337918094, 3.5121872236280502, 6.101369207133636],
    [1.1455413119574787, 2.09207680743166, 2.401648739851324, 4.648701913550955],
    [0.23651821045037935, 3.273921652227796, 6.653785160556435, 7.852361592355326],
    [0.39167392948246116, 2.904842078165961, 4.350924097653057, 7.283756261202882],
    [1.4493744450011192, 3.107813730181106, 4.0732584925191375, 6.074056390705942],
    [0.9703088589964726, 2.704894120838568, 5.614959896381175, 9.861044488192851],
    [2.4851031922274824, 2.498299328292571, 5.40061606091906, 7.612590505893354],
    [4.458287411056165, 4.476767881202873, 7.367699289066585, 9.796205152656087],
    [0.5786987443954239, 1.7100525934917277, 5.084129182333024, 5.630306176061717],
    [1.8413020920989698, 6.9948625349471625, 7.957001428363615, 9.154926098147968],
    [2.3907353389496544, 3.3165730083524214, 5.354913015832343, 5.570136986715461],
    [1.1404804300644833, 5.122433847980911, 7.15232833900189, 7.2910133596061435],
    [1.2354115336581828, 4.827752331121181, 8.956669145480664, 9.71201662259473],
    [0.023527418496260832, 3.224554513411435, 3.319942554807902, 6.427411433053952],
    [1.139673203662509, 4.042398155271586, 4.095859416140789, 8.682594559285766],
    [0.8284122339602418, 0.9303769792295691, 6.451854902978274, 9.546028798149003],
    [1.118513701710936, 1.5546484149486683, 6.360798265232876, 7.730481432569016],
    [2.887343212164708, 4.0379340135723485, 4.360656874542022, 9.713820729740693],
    [0.59587158231315, 2.0021388137968854, 4.699242813327114, 7.39249347463164],
    [0.8705317064479279, 4.872241500754413, 7.125121200309729, 7.264371096134395],
    [3.740411244165849, 4.604859225385168, 4.82801255843037, 7.684490069023578],
    [0.83432818048498, 2.8391981981232783, 5.716491746274307, 9.833722526358331],
    [4.060511358851362, 8.499731515679185, 9.087168072981074, 9.89014946006521],
    [0.20184533686480655, 1.3072093004814245, 7.871494226048224, 8.038376287807687],
    [4.178227228525639, 4.643419848042379, 6.416597722924353, 8.79272334698282],
    [0.1315465468439314, 0.25812719199046374, 2.7118227961292662, 9.538344015658632],
    [1.7412794117186237, 5.144817301517398, 7.62751554948977, 8.343716835832607],
    [0.8034440742471327, 1.848934184155413, 2.1705742540094075, 7.879225736820667],
    [1.8119090203828203, 5.203748232970458, 6.147085587501627, 9.479860213281281],
    [3.3223126307208894, 3.6730561891959357, 7.895660446897238, 8.731058191653316],
    [2.970487579791982, 4.248935871326026, 9.03951093954961, 9.703193189894662],
    [2.651217578176478, 4.7749310510073055, 6.228878300513642, 7.953957435361774],
    [0.3274591956744777, 0.3555972313828448, 4.269427391803575, 4.780796988962263],
    [1.548535478679528, 4.766265892315213, 4.797146885890292, 9.953766146071914],
    [0.30364427229536695, 1.0156046000348695, 7.3245371976024725, 8.077783291795278],
    [3.3552290646107297, 5.216454688454316, 7.383234505354746, 7.678863484717118],
    [2.3150047940080087, 3.1009750157628635, 4.456898205080096, 8.127514811322085],
    [0.8697613051310193, 5.376955085715024, 5.79547044707774, 7.43428581673887],
    [2.597366130943861, 4.257425091652578, 4.897482855354195, 9.090429239907907],
    [2.328830483140749, 3.2501123496120687, 4.243789905535072, 4.805670062266994],
    [0.2706906895053718, 2.7702664377187523, 4.8508694452208045, 5.432631729344346],
    [0.060546195214924436, 1.2636846389867185, 6.421017973397513, 8.680936725516881],
    [2.1778942765826725, 4.078995040029475, 5.467503613016901, 5.731625267714277],
    [1.5479639073702878, 4.578430067992101, 6.84915544071406, 7.754778075616283],
    [2.3623774658303685, 3.329424700116921, 5.933147262182311, 9.977963930200033],
    [1.6279298580696455, 1.6999027546964118, 2.72093643755234, 3.541880624355487],
    [2.4963073797569613, 3.1846911394673993, 4.0320131069176135, 7.456152922138449],
    [1.0183549826982274, 2.130617206227331, 2.326146255781716, 6.540597529064046],
    [4.973405871857953, 5.625279084261187, 5.725715180986213, 9.547670213655769],
    [1.1782259680401252, 4.638343342840786, 7.674694712873481, 8.534663121558092],
    [0.18761940010243783, 1.0121118486787384, 2.1917685893381424, 4.559067107000408],
    [2.3361557095614858, 3.086278434528843, 6.155507335095217, 7.02615092572151],
    [1.3958600961558487, 6.153144019210266, 8.02308097567246, 9.648884284927849],
    [5.886096322437733, 8.853122874996595, 9.245647320843215, 9.709589504947019],
    [5.3024257316039956, 7.6015950185718975, 8.007310457993237, 9.317767179999016],
    [2.602563390902616, 6.020328306146804, 7.4016185619604355, 9.42592353880684],
    [2.4050428356467837, 5.045186568312103, 6.054371311021708, 9.159195728349301],
    [4.2173608618371485, 8.601930881475727, 8.821041223235408, 9.473668731365116],
    [6.0537892898442465, 6.929043066924634, 7.899303330449047, 9.12771428953495],
    [2.783470297335242, 4.081413025065456, 6.033746999444491, 7.257725739687617],
    [1.6015262807077302, 2.986084463847857, 5.474033272365722, 9.561661351149048],
    [1.8947548128305736, 3.4743848992909276, 5.237295706292434, 8.204377492877823],
    [2.340091439315116, 2.3838165971144623, 2.7015645193587923, 6.99257740511038],
    [2.103221617932279, 8.000563911825504, 9.44629738120113, 9.729549620050555],
    [1.0080615439578244, 1.7201878286182504, 3.9653263801454406, 7.76751425862296],
    [1.0353052511717264, 4.701681394341563, 4.942701915090318, 8.077895355183838],
    [0.7055308965749885, 2.484313375303521, 5.580879002049767, 9.36981961147013],
    [0.38421638361476274, 1.7986452536362407, 3.7044706408434136, 7.12687918919948],
    [1.0989053554977135, 4.652865051825636, 5.950387582751051, 9.09568387522597],
    [2.5849853777151943, 5.516785787561415, 5.761023804239373, 9.771727611604385],
    [0.573343289300503, 5.783850026896062, 8.062745959743935, 8.094805094335523],
    [1.6729322524404355, 2.9597268417863964, 9.263539995528205, 9.747637919494988],
    [1.4072973222648621, 1.9150243551474444, 2.0068099382381033, 9.83085974072983],
    [0.7822245804476979, 3.3695552941133124, 7.110112986441792, 8.005786488248752],
    [4.5827054355387355, 5.616072332213269, 8.717273473809001, 9.582327509610852],
    [0.9604377526717023, 1.316756880947172, 5.768804634781119, 8.60105189046197],
    [2.932134100414104, 6.671173957214195, 7.230463880310358, 8.262706604374088],
    [0.24247900543851397, 5.5556028102898845, 5.561622267111748, 6.552614661126227],
    [0.8195824623203762, 2.517436296746082, 5.485554087548127, 9.795753607190294],
    [1.91382835661275, 4.467855102703009, 8.061784282970304, 8.330692473099427],
])
# Target allocations (one row of 4 per sample).  Column 0 appears to equal
# wb/4 (an even four-way split) with later columns growing as flows saturate
# -- presumably the max-min fairness ground truth; confirm with the generator.
label_dataset = torch.tensor([
    [4.0, 5.162460121955502, 5.319642332993747, 5.319642332993747],
    [1.0, 1.0, 1.0, 1.0],
    [0.25, 0.25, 0.25, 0.25],
    [6.0, 7.958414806583843, 11.469488796669088, 21.640215455438046],
    [1.5, 1.6655822456688627, 1.6655822456688627, 1.6655822456688627],
    [0.75, 0.75, 0.75, 0.75],
    [2.75, 3.5599570643018073, 5.172623705422591, 8.593068190664853],
    [4.75, 4.75, 4.75, 4.75],
    [5.25, 6.833900907194449, 9.328534317695496, 14.91318177234266],
    [5.5, 6.478017928126893, 6.478017928126893, 6.478017928126893],
    [5.75, 6.104877595294343, 6.104877595294343, 6.104877595294343],
    [1.25, 1.25, 1.25, 1.25],
    [7.5, 8.105391321235945, 8.458630347379973, 8.536477824092398],
    [2.0, 2.4197329966728534, 2.5631726185689345, 2.5631726185689345],
    [4.0, 4.0, 4.0, 4.0],
    [6.5, 8.385522979774958, 11.508031359278185, 18.945946550379134],
    [0.5, 0.5, 0.5, 0.5],
    [1.0, 1.1734562896230938, 1.1941262776401995, 1.1941262776401995],
    [7.5, 9.143611361932638, 11.775336192159333, 16.412881640712296],
    [1.0, 1.0, 1.0, 1.0],
    [3.75, 4.808207324980517, 6.632239370574871, 9.75229151752169],
    [4.5, 5.618152896014173, 7.38119094030543, 12.360733140759535],
    [3.25, 4.254493929849874, 4.744780068660912, 4.744780068660912],
    [2.25, 2.8694420235058464, 2.8694420235058464, 2.8694420235058464],
    [0.75, 0.75, 0.75, 0.75],
    [6.25, 8.009897047001175, 10.662398510082479, 15.709837123783782],
    [4.75, 5.504965602590839, 7.008298739739972, 8.615981418560885],
    [0.75, 0.75, 0.75, 0.75],
    [7.0, 9.140433751868192, 12.855624331056426, 20.62711947977983],
    [0.25, 0.25, 0.25, 0.25],
    [4.5, 5.203088220350115, 6.146345826348962, 6.9377786368655805],
    [7.5, 9.619839856645171, 11.868542860977302, 16.584757382952713],
    [6.25, 7.921529488780606, 9.468418067610319, 9.980166989739974],
    [2.5, 3.3254908605012465, 3.3759590340461525, 3.431975513284403],
    [4.25, 5.286775598779164, 5.908964320532952, 7.722069224925115],
    [6.5, 8.390529255346586, 12.120605393405093, 17.78935588383191],
    [4.5, 5.627162099429688, 7.663418941670198, 8.96603961810752],
    [5.0, 5.704218929278431, 6.537361387131472, 8.714065899720921],
    [4.0, 5.1347094725622835, 6.700994801944983, 8.702746790562852],
    [1.5, 1.7098227645173574, 1.7098227645173574, 1.7098227645173574],
    [6.25, 7.0865295852780505, 8.327364765224491, 11.826716972018612],
    [6.25, 8.05522393983834, 10.66323681069587, 15.609981875117434],
    [7.5, 8.646496213716212, 8.719878562734724, 8.719878562734724],
    [7.5, 9.932718221045064, 14.245472681326884, 20.619451136605544],
    [5.0, 5.273924257158121, 5.589176461715992, 5.589176461715992],
    [6.25, 8.289484484385357, 12.305163130582802, 21.898503465036338],
    [2.0, 2.086240196093792, 2.086240196093792, 2.086240196093792],
    [1.0, 1.0655186419176224, 1.0655186419176224, 1.0655186419176224],
    [0.25, 0.25, 0.25, 0.25],
    [4.0, 4.225895789759703, 4.502315590041587, 4.502315590041587],
    [3.75, 4.009837473402673, 4.009837473402673, 4.009837473402673],
    [6.5, 7.782927473941174, 9.286925685408109, 12.344973070302576],
    [2.5, 3.2241802681085074, 4.658471786471338, 5.047516181139102],
    [6.25, 7.817154840440157, 9.342599314502628, 13.888051743114964],
    [6.75, 8.898785242568211, 12.840375563834883, 18.35621393006729],
    [6.75, 7.881590311796423, 9.214158123467477, 11.045081741580209],
    [1.75, 1.75, 1.75, 1.75],
    [3.25, 4.04341289828966, 4.04341289828966, 4.04341289828966],
    [7.5, 9.13421128968538, 11.57260438870178, 18.247725922049366],
    [3.0, 3.223723172286417, 3.223723172286417, 3.223723172286417],
    [0.5, 0.5764364368315428, 0.5764364368315428, 0.5764364368315428],
    [2.0, 2.6464846015950254, 3.337884582899179, 3.337884582899179],
    [3.5, 3.9407019078057757, 3.9407019078057757, 3.9407019078057757],
    [5.25, 6.4840120308765705, 7.436803012318805, 8.02445058392355],
    [5.25, 6.21254084472321, 7.654098917026355, 9.375050571870398],
    [3.5, 4.124023380643451, 5.33608369361697, 7.951230949681601],
    [4.25, 4.83456420674768, 5.65950074038782, 7.286988373858026],
    [3.5, 4.327215005767258, 5.425513905537221, 8.524881555292726],
    [1.75, 1.75, 1.75, 1.75],
    [3.5, 4.273924677319958, 4.273924677319958, 4.273924677319958],
    [2.0, 2.604126866632521, 3.400134375609412, 4.608500161880682],
    [2.0, 2.0, 2.0, 2.0],
    [6.0, 7.534713301281384, 8.225497942316942, 8.427914908961423],
    [0.75, 0.75, 0.75, 0.75],
    [6.75, 7.232524756132001, 7.232524756132001, 7.232524756132001],
    [3.75, 4.132478869699128, 4.132478869699128, 4.132478869699128],
    [6.75, 8.198319054784406, 9.774885298020557, 13.495399285019406],
    [6.25, 6.927546379387617, 6.927546379387617, 6.927546379387617],
    [5.0, 5.0, 5.0, 5.0],
    [0.25, 0.25, 0.25, 0.25],
    [4.0, 4.79949123976409, 5.706194627722207, 5.938355983078691],
    [1.25, 1.25, 1.25, 1.25],
    [1.0, 1.0, 1.0, 1.0],
    [5.75, 6.965592794022574, 6.965592794022574, 6.965592794022574],
    [7.25, 9.330646152014058, 13.135875313711962, 22.306424247278485],
    [6.0, 7.654898249609425, 9.131506677243356, 13.320311439396395],
    [6.0, 7.764823034475004, 10.405077864060745, 15.229276726071724],
    [7.25, 9.53859453879508, 13.4085691813745, 23.112667721905584],
    [5.75, 7.300364881500762, 8.624114796338326, 11.2978420099256],
    [7.5, 9.13833820742827, 10.949114417361697, 16.137205030484022],
    [3.75, 4.808885570233166, 4.808885570233166, 4.808885570233166],
    [1.5, 1.5, 1.5, 1.5],
    [1.25, 1.25, 1.25, 1.25],
    [1.25, 1.4059251398507673, 1.4059251398507673, 1.4059251398507673],
    [7.0, 7.8057648548204215, 8.900611116123997, 9.083948758438993],
    [5.75, 7.346520749109432, 10.361402683190562, 14.954000731600004],
    [3.75, 4.022621966528632, 4.022621966528632, 4.022621966528632],
    [7.5, 9.91917366485383, 12.100959092135803, 18.640295917159857],
    [0.25, 0.25, 0.25, 0.25],
    [4.0, 4.69539054779575, 4.80915827034212, 4.80915827034212],
])
# Train/eval split: first 70 samples train, remaining 30 evaluate.
wb_train, wb_evl = wb_dataset[:70, :], wb_dataset[70:100, :]
f_train, f_evl = f_dataset[:70, :], f_dataset[70:100, :]
label_train, label_evl = label_dataset[:70, :], label_dataset[70:100, :]

# wb_np=np.random.randn(100,1)
# f_np=np.random.randn(100,4)
# lable_np=f_np.sum(axis=1)**2+wb_np
# wb_dataset =torch.from_numpy(np.float32(wb_np))
# f_dataset = torch.from_numpy(np.float32(f_np))
# label_dataset = torch.from_numpy(np.float32(lable_np))

# label
# TODO: the labels should be the ground-truth allocations produced by the
# max-min fairness algorithm



# Model
class Max_Min_Model(nn.Module):
    """Four-step unrolled allocator.

    Runs the shared single-step ``Model`` four times.  Step k's 2-column input
    is (previous step's output, wd) -- a zero column for the first step --
    alongside the flow features ``f``.  The four per-step outputs are
    concatenated into a (batch, 4) result.
    """

    def __init__(self):
        super(Max_Min_Model, self).__init__()
        self.model = Model()  # one Model instance -> weights shared by all steps
        # NOTE(review): `output` is never used in forward(); kept so existing
        # checkpoints / state_dicts still load.
        self.output = nn.Linear(1, 4)

    def forward(self, wd, f):
        """wd: (batch, 1) scalar feature; f: (batch, 4) flow features."""
        rows = wd.size(0)
        # Bug fix: new_zeros keeps dtype/device consistent with wd; the
        # original hand-built a CPU float32 tensor, which would break on
        # any other device or dtype.
        prev = wd.new_zeros(rows, 1)
        outs = []
        for _ in range(4):
            prev = self.model(torch.cat((prev, wd), 1), f)  # (batch, 1)
            outs.append(prev)
        return torch.cat(outs, 1)  # (batch, 4)

class Model(nn.Module):
    """Single allocation step: 2 inputs -> 4 hidden -> 4 paired scores -> 1 output."""

    def __init__(self):
        super(Model, self).__init__()
        self.first = First_layer()
        self.second = Second_layer()
        self.third = nn.Linear(4, 1)

    def forward(self, wd, f):
        hidden = self.first(wd)            # (batch, 4)
        scored = self.second(hidden, f)    # (batch, 4), paired with flow features
        return F.relu(self.third(scored))  # (batch, 1), clamped non-negative


class First_layer(nn.Module):
    """ReLU(Linear(2 -> 4)): maps each 2-feature row to 4 hidden units."""

    def __init__(self):
        super(First_layer, self).__init__()
        self.first = nn.Linear(2, 4)

    def forward(self, wd):
        # Accept any layout that flattens to (batch, 2).
        pairs = wd.reshape(-1, 2)
        return F.relu(self.first(pairs))

class Second_layer(nn.Module):
    """Per-flow scoring layer.

    Each of the four flow columns gets its own Linear(2 -> 1) head.  Head i
    sees the pair (first[:, i], flows[:, i]); the four ReLU'd scalars are
    concatenated into a (batch, 4) output.
    """

    def __init__(self):
        super(Second_layer, self).__init__()
        # One independent head per flow column.
        self.s1 = nn.Linear(2, 1)
        self.s2 = nn.Linear(2, 1)
        self.s3 = nn.Linear(2, 1)
        self.s4 = nn.Linear(2, 1)

    def forward(self, first, flows):
        outs = []
        for col, head in enumerate((self.s1, self.s2, self.s3, self.s4)):
            # (batch, 2): hidden unit `col` paired with flow feature `col`.
            pair = torch.stack((first[:, col], flows[:, col]), dim=1)
            outs.append(F.relu(head(pair)))
        return torch.cat(outs, 1)


# Build the network and training utilities.
# (Removed: a duplicate `import numpy as np` -- numpy is already imported at
# the top of the file -- and a dead `epoch = 1` that the training loop's
# `for epoch in range(...)` immediately rebinds.)
net = Max_Min_Model()
# print(net(wb_dataset, f_dataset))

epochs = 100
lr = 0.01
optimizer = torch.optim.SGD(net.parameters(), lr=lr)
loss_func = nn.MSELoss()

# Loss-curve history for plotting.
epoch_list = []
loss1_list = []
loss2_list = []
loss3_list = []
loss4_list = []
loss_list = []
# train
# Train: regress the four per-flow allocations onto the max-min labels.
for epoch in range(epochs):
    optimizer.zero_grad()
    outputs = net(wb_train, f_train)
    # One MSE term per flow column; the total loss is their sum.
    loss1 = loss_func(outputs[:, 0], label_train[:, 0])
    loss2 = loss_func(outputs[:, 1], label_train[:, 1])
    loss3 = loss_func(outputs[:, 2], label_train[:, 2])
    loss4 = loss_func(outputs[:, 3], label_train[:, 3])
    loss = loss1 + loss2 + loss3 + loss4

    epoch_list.append(epoch)
    loss_list.append(loss.item())

    loss.backward()
    optimizer.step()
    # +1 keeps the log 1-based, matching the redundant manual counter the
    # original incremented alongside the loop variable (now removed).
    print('epoch {}, loss {}'.format(epoch + 1, loss.item()))

plt.plot(epoch_list, loss_list, linewidth=4)
plt.title("max-min-train", fontsize=14)
plt.xlabel("epoch", fontsize=14)
plt.ylabel("loss", fontsize=14)
plt.show()



# Evaluate on the held-out samples; no gradients are needed for metrics.
with torch.no_grad():
    outputs = net(wb_evl, f_evl)
labels = label_evl
evl_mse = torch.nn.functional.mse_loss(
    outputs,  # predictions
    labels,   # targets
)
print('{}:{}'.format('MSE', evl_mse))
# Per-sample L2 norm of the relative errors, averaged over the eval set.
# Bug fix: the original immediately overwrote this with
# `evl_mse.sum(dim=0) / 30`, so the printed "MAPE" was actually just the
# scalar MSE again.  Also replaced the hard-coded 30 with the actual count.
evl_mape = torch.sqrt((((outputs - labels) / labels) ** 2).sum(dim=1))
evl_mape = evl_mape.sum(dim=0) / labels.size(0)
print('{}:{}'.format('MAPE', evl_mape))



