import torch
import torch.nn as nn
import numpy as np
import torch.nn.functional as f
from torch.autograd import Variable

# ---- Input tensor: a single NCHW "image" of shape (1, 1, 4, 5) ----
in_n = 1  # batch size
in_c = 1  # channels
in_d = 1  # depth (unused by the 2-D conv; kept for parity with the 3-D experiments)
in_h = 4  # height
in_w = 5  # width

# NOTE(review): the original file carried two much larger hard-coded input
# tensors as commented-out code; the comment had been line-wrapped so its
# continuation lines were no longer commented, which made the file a
# SyntaxError. The dead data is removed here; only the active 20-element
# input remains.
in_tensor = torch.tensor(
    [-4.78704, 7.29176, 3.12263, -8.72102, 6.54695,
     0.408178, -4.7643, 8.49932, 4.50451, 4.82586,
     -1.61296, -6.11842, 6.81645, -4.72776, 9.40993,
     9.21799, 6.30043, 0.356149, 9.26544, 0.188188]
)
# Reshape first, then flag for autograd: the reshaped tensor has no grad_fn,
# so it is a leaf and .grad will be populated by backward(). This replaces the
# deprecated Variable(..., requires_grad=True) wrapper with identical behavior.
in_tensor = torch.reshape(in_tensor, [in_n, in_c, in_h, in_w]).requires_grad_(True)

print('in_tensor')
print(in_tensor.shape)
print(in_tensor)

# ---- Convolution kernel: conv2d weight of shape (1, 1, 3, 2) ----
# conv2d expects weights shaped (out_channels, in_channels/groups, kH, kW);
# here ke_d = 1 fills the in_channels slot — presumably a leftover from the
# 3-D variant of this experiment (TODO confirm against the conv3d version).
ke_n = 1  # out_channels
ke_c = 1  # in_channels (unused in the reshape below)
ke_d = 1  # depth, used as the in_channels dimension here
ke_h = 3  # kernel height
ke_w = 2  # kernel width

ke_tensor = torch.tensor([-5.87369, 6.04822, -3.36429, -5.21126, 6.11504, 6.70013])
# Reshape first, then flag for autograd (leaf tensor), replacing the
# deprecated Variable(..., requires_grad=True) wrapper.
ke_tensor = torch.reshape(ke_tensor, [ke_n, ke_d, ke_h, ke_w]).requires_grad_(True)

print('ke_tensor')
print(ke_tensor.shape)
print(ke_tensor)


# 2-D cross-correlation of the (1, 1, 4, 5) input with the (1, 1, 3, 2)
# kernel. padding=1 zero-pads each spatial side, so the output shape is
# (1, 1, 4 + 2 - 3 + 1, 5 + 2 - 2 + 1) = (1, 1, 4, 6).
out_tensor = f.conv2d(in_tensor, ke_tensor, padding=1, dilation=1, groups=1)

# Backpropagate with the output itself as the incoming gradient — the same
# gradients as d/dx of 0.5 * (out ** 2).sum(). detach() stops the gradient
# argument from being part of the graph. This populates ke_tensor.grad and
# in_tensor.grad for inspection below.
out_tensor.backward(out_tensor.clone().detach())

print(out_tensor)
print(ke_tensor.grad)
print(in_tensor.grad)