# %%
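# Gradio demo for metabolic syndrome (MetS) screening: one row of 15 clinical and
# lifestyle features is scored by a pre-trained broad-learning-style network whose
# parameters are loaded from 'params.npz', after the raw features are augmented by
# the pickled 'GP.pickle' feature transformer. Both files are expected in the
# working directory.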
import gradio as gr
from sklearn import preprocessing
import os
import numpy as np
import pickle
import pandas as pd
import scipy.io as scio
# %%


def tansig(x):
    # MATLAB-style tansig activation; numerically equivalent to np.tanh(x).
    return (2/(1+np.exp(-2*x)))-1


def to_proba(output):
    # Min-max scale raw output scores into [0, 1]; kept as an optional alternative
    # to reporting the raw scores in predict().
    ret = (output - np.min(output)) / (np.max(output) - np.min(output))
    return ret


def softmax(x):
    # Numerically stable softmax: shift by the max before exponentiating
    # (softmax is invariant to additive shifts, so the result is unchanged).
    x = x - np.max(x)
    ex = np.exp(x)
    return ex / ex.sum()


def load_data(path):
    # Load one sample from a MATLAB .mat file that stores the design matrix
    # under the key 'trainx'; only the first row is returned.
    # Alternative CSV loader:
    # df = pd.read_csv(path)
    # return df.values
    data = scio.loadmat(path)
    traindata = np.double(data['trainx'])
    return traindata[0]
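
# Example use of the helper above, kept commented out (hypothetical filename;
# assumes the original .mat training data is available on disk):
# sample_row = load_data('mets_train.mat')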


# %%
# Output classes: index 0 = no metabolic syndrome, index 1 = metabolic syndrome.
labels = ['No MetS', 'MetS']


def predict(NCRF):
    # Load the trained model parameters saved by the training script.
    params = np.load('params.npz')
    N1 = params['N1']                                # feature nodes per mapping window
    N2 = params['N2']                                # number of feature-mapping windows
    Beta1OfEachWindow = params['Beta1OfEachWindow']  # weights of each mapping window
    ymax = params['ymax']
    ymin = params['ymin']
    minOfEachWindow = params['minOfEachWindow']      # per-window minimum used for rescaling
    distOfMaxAndMin = params['distOfMaxAndMin']      # per-window (max - min) used for rescaling
    weightOfEnhanceLayer = params['weightOfEnhanceLayer']
    parameterOfShrink = params['parameterOfShrink']  # shrink factor applied before tansig
    OutputWeight = params['OutputWeight']            # output-layer weights

    # The Gradio table input arrives as a pandas DataFrame; keep only its values.
    # test_x = load_data(file)
    test_x = NCRF.values

    # Augment the raw features with the pickled GP feature transformer.
    with open('GP.pickle', 'rb') as f:
        gp = pickle.load(f)
    gp_features = gp.transform(test_x)
    test_x = np.hstack((test_x, gp_features))

    # Standardize each sample across its own features (zero mean, unit variance).
    test_x = preprocessing.scale(test_x, axis=1)

    # Feature-mapping layer: append a 0.1 bias column, project through each window,
    # and rescale with the per-window statistics recorded at training time.
    FeatureOfInputDataWithBiasTest = np.hstack(
        [test_x, 0.1 * np.ones((test_x.shape[0], 1))])
    OutputOfFeatureMappingLayerTest = np.zeros(
        [test_x.shape[0], N2 * N1])

    for i in range(N2):
        outputOfEachWindowTest = np.dot(
            FeatureOfInputDataWithBiasTest, Beta1OfEachWindow[i])
        OutputOfFeatureMappingLayerTest[:, N1*i: N1*(i+1)] = (ymax - ymin)*(
            outputOfEachWindowTest - minOfEachWindow[i]) / distOfMaxAndMin[i] - ymin

    # Enhancement layer: append a bias column, project with the enhancement weights,
    # apply the shrink factor, and pass through the tansig activation.
    InputOfEnhanceLayerWithBiasTest = np.hstack(
        [OutputOfFeatureMappingLayerTest,
         0.1 * np.ones((OutputOfFeatureMappingLayerTest.shape[0], 1))])
    tempOfOutputOfEnhanceLayerTest = np.dot(
        InputOfEnhanceLayerWithBiasTest, weightOfEnhanceLayer)
    OutputOfEnhanceLayerTest = tansig(
        tempOfOutputOfEnhanceLayerTest * parameterOfShrink)

    # Output layer: concatenate mapped and enhancement features, then apply the output weights.
    InputOfOutputLayerTest = np.hstack(
        [OutputOfFeatureMappingLayerTest, OutputOfEnhanceLayerTest])
    OutputOfTest = np.dot(InputOfOutputLayerTest, OutputWeight)
    OutputOfTest = np.squeeze(OutputOfTest)

    # Raw output scores are reported as-is; softmax() or to_proba() could be applied
    # here if scores normalized to [0, 1] are preferred.
    proba = OutputOfTest
    confidences = {labels[i]: float(proba[i]) for i in range(len(labels))}
    return confidences


# %%
# Original Chinese feature names (English equivalents used below):
# headers = ["性别", "年龄", "腰臀比", "体脂率", "夜尿次数", "小便性质",
#            "舌色", "苔色", "眼", "证型-淤血", "渴饮", "夏季平均每日空调时常", "吸烟", "饮食肥腻", "BMI"]
headers = ["Gender", "Age", "Waist-hip ratio", "Body fat rate", "Nocturia frequency", "Nature of urine",
           "Tongue color", "Tongue fur color", "Eye anomaly", "Blood stasis", "Thirst",
           "Average daily air-conditioner time in summer", "Smoking", "Fatty diet", "BMI"]
iface = gr.Interface(fn=predict,
                     inputs=gr.DataFrame(headers=headers,
                                         row_count=(1, "fixed"),
                                         col_count=(15, "fixed"), wrap=True),
                     outputs=gr.Label()
                     )
iface.launch()

# %%