import cvxpy as cp
import math

from prob import *
from NN import cdinn2, ann
import torch

# act = 0.5 * x + Math.sqrt(0.25 * x * x + 0.0001)
def act(x):
        """Activation used when building the symbolic network: ReLU.

        Implemented with cvxpy's `maximum` so the result remains a cvxpy
        expression when `x` is one. (Earlier variants — bent-ReLU and
        sigmoid — were tried and abandoned; see version history.)
        """
        relu = cp.maximum(x, 0)
        return relu

def get_single_layer(x, D_I, D_H, D_O, W_IH, B_IH, W_HO, B_HO):  # simulate full connect
        """Symbolically evaluate a fully connected net with one hidden layer.

        Args:
                x: indexable input of length D_I (e.g. a cvxpy Variable).
                D_I, D_H, D_O: input / hidden / output dimensions.
                W_IH, B_IH: hidden-layer weights (D_H rows of D_I) and biases (D_H).
                W_HO, B_HO: output-layer weights (D_O rows of D_H) and biases (D_O).

        Returns:
                The expression for the FIRST output node only (node_O[0]),
                even when D_O > 1.
        """
        # Hidden layer: affine transform followed by the activation `act`.
        # (Originally written with manual accumulators that shadowed the
        # builtin `sum`; builtin sum over a generator is equivalent.)
        node_H = [act(sum(W_IH[i][j] * x[j] for j in range(D_I)) + B_IH[i])
                  for i in range(D_H)]

        # Output layer: purely affine, no activation.
        node_O = [sum(W_HO[i][j] * node_H[j] for j in range(D_H)) + B_HO[i]
                  for i in range(D_O)]
        return node_O[0]


# maybe HH-layer can be a list, using for loop add more layer
# only single-layer, HH-layer can be [], len(HH)==0
def get_muti_layer(x, D_I, D_H, D_O, W_IH, B_IH, W_HH, B_HH, W_HO, B_HO):  # simulate full connect
        """Symbolically evaluate a fully connected net with TWO hidden layers.

        The second hidden layer is assumed square (D_H x D_H). As with
        `get_single_layer`, only the first output node's expression is
        returned.

        Args:
                x: indexable input of length D_I (e.g. a cvxpy Variable).
                D_I, D_H, D_O: input / hidden / output dimensions.
                W_IH, B_IH: first hidden layer weights (D_H x D_I) and biases.
                W_HH, B_HH: second hidden layer weights (D_H x D_H) and biases.
                W_HO, B_HO: output layer weights (D_O x D_H) and biases.

        Returns:
                node_O[0], the first output expression.
        """
        # First hidden layer: affine + activation. Builtin `sum` replaces the
        # original manual accumulator that shadowed the builtin.
        node_H = [act(sum(W_IH[i][j] * x[j] for j in range(D_I)) + B_IH[i])
                  for i in range(D_H)]

        # Second hidden layer: affine + activation over the first layer.
        node_HH = [act(sum(W_HH[i][j] * node_H[j] for j in range(D_H)) + B_HH[i])
                   for i in range(D_H)]

        # Output layer: purely affine, no activation.
        node_O = [sum(W_HO[i][j] * node_HH[j] for j in range(D_H)) + B_HO[i]
                  for i in range(D_O)]
        return node_O[0]

def getExpr(x, model):
        """Build the two symbolic sub-network expressions f1 and f2 from a
        trained model, print them in ReLU(...) notation, and return them.

        Relies on module-level globals imported from `prob`: DIM (input
        dimension), D_H (hidden width) and N_H (number of hidden layers,
        expected to be 1 or 2).

        Args:
                x: indexable input, typically cp.Variable(DIM).
                model: trained torch module whose parameter names follow the
                        cdinn2 layout (input_layer{1,2}, hidden_layers{1,2}.0,
                        output_layer_linear_prim{1,2}).

        Returns:
                (f1, f2): symbolic expressions for the two sub-networks.

        Raises:
                ValueError: if N_H is neither 1 nor 2 (the original code would
                        have died with a NameError at the return instead).
                KeyError: if an expected parameter name is missing from model.
        """
        D_I = DIM  # input dim
        D_O = 1    # output dim

        # Snapshot every trained tensor once, keyed by its parameter path.
        # This replaces the original loop of repeated string comparisons.
        params = {name: p.data.tolist() for name, p in model.named_parameters()}

        W_IH_1 = params["input_layer1.weight"]
        B_IH_1 = params["input_layer1.bias"]
        W_IH_2 = params["input_layer2.weight"]
        B_IH_2 = params["input_layer2.bias"]
        W_HO_1 = params["output_layer_linear_prim1.weight"]
        B_HO_1 = params["output_layer_linear_prim1.bias"]
        W_HO_2 = params["output_layer_linear_prim2.weight"]
        B_HO_2 = params["output_layer_linear_prim2.bias"]

        if N_H == 1:
                f1 = get_single_layer(x, D_I, D_H, D_O, W_IH_1, B_IH_1, W_HO_1, B_HO_1)  # 0 hidden
                f2 = get_single_layer(x, D_I, D_H, D_O, W_IH_2, B_IH_2, W_HO_2, B_HO_2)  # 0 hidden
        elif N_H == 2:
                # Hidden-to-hidden parameters only exist in the 2-layer nets.
                W_HH_1 = params["hidden_layers1.0.weight"]
                B_HH_1 = params["hidden_layers1.0.bias"]
                W_HH_2 = params["hidden_layers2.0.weight"]
                B_HH_2 = params["hidden_layers2.0.bias"]
                f1 = get_muti_layer(x, D_I, D_H, D_O, W_IH_1, B_IH_1, W_HH_1, B_HH_1, W_HO_1, B_HO_1)  # 1 hidden
                f2 = get_muti_layer(x, D_I, D_H, D_O, W_IH_2, B_IH_2, W_HH_2, B_HH_2, W_HO_2, B_HO_2)  # 1 hidden
        else:
                raise ValueError("N_H must be 1 or 2, got %r" % (N_H,))

        # Pretty-print: cvxpy renders ReLU as maximum(..., 0.0) and indexing
        # as x[i]; rewrite both for readability (replace strings unchanged).
        str1 = str(f1)
        str1 = str1.replace("maximum", "ReLU").replace("x[0]", "x_0").replace("x[1]", "x_1").replace("@", "*").replace("* x", "x")
        print("f1: ", str1)
        str2 = str(f2)
        str2 = str2.replace("maximum", "ReLU").replace("x[0]", "x_0").replace("x[1]", "x_1").replace("@", "*").replace("* x", "x")
        print("\n\nf2: ", str2)
        return f1, f2

if __name__ == "__main__":
        # Rebuild the cdinn2 architecture and restore the trained weights.
        net = cdinn2.gen_nn()
        net.load_state_dict(torch.load('../model/cdinn_eg2_1_20_0.1_0.1_1_train.pt'), strict=True)

        # Symbolic input vector; getExpr prints its entries as x_0, x_1, ...
        sym_x = cp.Variable(DIM, name='x')
        getExpr(sym_x, net)


'''
cdinn_c9_1_5_0.1_0.1_1_train.pt:
f1:  3.697671200574064 * ReLU(-2.3555080130215034 x_0 + 0.5080064481768952 x_1 + 1.5978531042122697 x[2] + -1.732555707001236, 0.0) + 1.2779955196147708 * ReLU(-1.3431997266517848 x_0 + 1.2386176659961365 x_1 + 2.1742869309242843 x[2] + 1.0031074254147239, 0.0) + 5.7232867175423445 * ReLU(-3.221236563758908 x_0 + 0.09426886506960044 x_1 + 1.4617897182163435 x[2] + -1.2866187646221285, 0.0) + 1.7267570373672696 * ReLU(-1.62133122889432 x_0 + 2.0590029841403044 x_1 + 2.650956414481197 x[2] + 0.7155050668650623, 0.0) + 4.923116607026241 * ReLU(4.43974810741452 x_0 + -1.1458881659168711 x_1 + -3.4407697987911297 x[2] + -2.0954810205499443, 0.0) + -1.8790103822750428
f2:  6.733199145955131 * ReLU(-0.8818492078016887 x_0 + 0.8700212904153117 x_1 + 0.464506584342843 x[2] + 5.1500721024819125, 0.0) + 3.709587119193221 * ReLU(1.2765578973353136 x_0 + -2.496805319676878 x_1 + 3.1420920554729883 x[2] + 3.068163017462594, 0.0) + 3.101726618130567 * ReLU(-1.2621896160269914 x_0 + 1.8584702353598221 x_1 + -1.8808559875052733 x[2] + 2.070827420748636, 0.0) + 3.56996634650788 * ReLU(-1.5734277917039292 x_0 + 1.429804758024462 x_1 + -1.2230775001926928 x[2] + 3.45007033028415, 0.0) + 4.3702140335710835 * ReLU(-1.381803554710274 x_0 + 1.4666608959491796 x_1 + 0.08380218150501274 x[2] + 4.4933020625358155, 0.0) + 2.396151653612465


cdinn_eg2_1_20_0.1_0.1_1_train.pt:
f1:  5.992429325103327 * ReLU(-8.695879238693298 x_0 + -4.60247364162369 x_1 + -1.7263153543633145, 0.0) + 0.0 * ReLU(0.21154923367863243 x_0 + 0.5372811906037872 x_1 + -1.217991956507937, 0.0) + 6.551299544038302 * ReLU(4.0726556055335275 x_0 + -6.534704631533768 x_1 + 1.4789023489184219, 0.0) + 20.93022670712791 * ReLU(2.646266834351836 x_0 + -4.659313831369517 x_1 + -4.838390151481466, 0.0) + 19.05074378961462 * ReLU(5.126428416532548 x_0 + -4.024086162282784 x_1 + -7.517105166548796, 0.0) + 5.987957727492485 * ReLU(5.1865824834483805 x_0 + 11.198855412512401 x_1 + 0.27091174861852313, 0.0) + 6.511543169367314 * ReLU(2.5975498689695486 x_0 + -6.511407308248731 x_1 + 1.092291459974705, 0.0) + 1.7739237955997287 * ReLU(-1.4258810657372383 x_0 + 3.0240465592557326 x_1 + -1.34639646037602, 0.0) + 5.663790297510382 * ReLU(2.4593838455320824 x_0 + -6.56926839456663 x_1 + 3.099016799686832, 0.0) + 0.0 * ReLU(0.228561561549288 x_0 + 0.48980721622515 x_1 + -0.5487046589721247, 0.0) + 5.590287401615001 * ReLU(-8.098689728462949 x_0 + -1.8474493727982249 x_1 + -2.1894353877094557, 0.0) + 2.2773258321239775 * ReLU(-1.5544006284730265 x_0 + 2.8965441360764776 x_1 + 1.6660855599730613, 0.0) + 3.6310126024401606 * ReLU(1.5997469407801816 x_0 + 8.662548298875288 x_1 + 1.1255656376111456, 0.0) + 5.317368608148567 * ReLU(2.4746107976236966 x_0 + 10.878035476346858 x_1 + 0.9471760283890165, 0.0) + 24.841074545086542 * ReLU(-2.0935422082703474 x_0 + 9.003994509653745 x_1 + -6.3781011793580635, 0.0) + 5.522345514278157 * ReLU(6.864439766953847 x_0 + 7.703313998679544 x_1 + 0.45293170519901604, 0.0) + 5.4107402403218074 * ReLU(2.000582152865797 x_0 + -7.781627087515488 x_1 + 0.8866943428426444, 0.0) + 5.541962707329192 * ReLU(7.4339853970186605 x_0 + 4.458135978781455 x_1 + 0.37630906200545905, 0.0) + 6.234432589787292 * ReLU(5.056997183498827 x_0 + 8.06832690149099 x_1 + 0.19220609232326516, 0.0) + 5.7722270349305385 * ReLU(-7.658508728428037 x_0 + -1.9265155571109946 x_1 + 
-0.2108641030590197, 0.0) + -1.8990406999587903
f2:  10.151848901131657 * ReLU(-3.6854499123076923 x_0 + -3.303851764111163 x_1 + -7.690798226301918, 0.0) + 3.328744073088331 * ReLU(-2.592684659219061 x_0 + -1.6518822537269933 x_1 + -3.736997184996804, 0.0) + 9.99983066064951 * ReLU(7.807393729018035 x_0 + 0.06398825775540916 x_1 + -3.1535998838946067, 0.0) + 2.9286187905114325 * ReLU(-4.612700547962939 x_0 + 0.7441061789637711 x_1 + 1.8136872850602141, 0.0) + 0.0 * ReLU(-0.440045407966252 x_0 + 0.3334198280003097 x_1 + -1.6110945094777234, 0.0) + 8.842200589339274 * ReLU(-2.582537035028631 x_0 + -3.0501556996540335 x_1 + -5.178099864173099, 0.0) + 2.0825393620712407 * ReLU(-5.358327201928461 x_0 + 1.1919116029081387 x_1 + 2.046463492103372, 0.0) + 4.286574732255281 * ReLU(-3.026235663199542 x_0 + -3.263381094552745 x_1 + 3.0960926824174226, 0.0) + 2.775915412844317 * ReLU(-5.1944388729407756 x_0 + 0.8419161947987284 x_1 + 2.03015912973406, 0.0) + 3.1838232655919443 * ReLU(-3.3304871602060206 x_0 + -3.7361657036009457 x_1 + 3.121959235825688, 0.0) + 4.577154781816285 * ReLU(-6.399140696022548 x_0 + -3.0878006848884225 x_1 + 2.881668373256814, 0.0) + 5.643750955579928 * ReLU(6.201664662435714 x_0 + 2.356580716579733 x_1 + 7.083553772516508, 0.0) + 15.72221581386625 * ReLU(6.044050436258437 x_0 + 2.8881036783181853 x_1 + -4.598343573893877, 0.0) + 9.315880140618216 * ReLU(4.776756139876112 x_0 + -7.94126888425619 x_1 + -1.378370710486846, 0.0) + 5.623210641823226 * ReLU(6.53394168536783 x_0 + 0.8675580204169374 x_1 + 7.204244827225664, 0.0) + 14.127795857753686 * ReLU(4.758823021709393 x_0 + 3.6829355281137124 x_1 + -4.199564064483428, 0.0) + 13.837703034243908 * ReLU(-3.7870072352445106 x_0 + -2.92514243022294 x_1 + -7.296551734499167, 0.0) + 4.7064689561382425 * ReLU(4.110670623995434 x_0 + 0.7556467549608683 x_1 + 5.440009504153229, 0.0) + 4.182595118424766 * ReLU(-4.772588343027734 x_0 + 0.7480928298176823 x_1 + 1.8283399878680355, 0.0) + 2.6892572545055726 * ReLU(-4.6840195809681076 x_0 + 0.7296624100483472 
x_1 + 1.8357989398363623, 0.0) + 1.8571746696196079

'''

















