# -*- coding: utf-8 -*-
"""
Created on Tue Jan 12 19:11:28 2021

@author: Victor
"""
"""A deep MNIST classifier using convolutional layers.

"""
# Disable linter warnings to maintain consistency with tutorial.
# pylint: disable=invalid-name
# pylint: disable=g-bad-import-order



#from com.huai.converlution.resnets.resnets_utils import *

import tensorflow as tf
#from tensorflow.keras.applications.resnet50 import ResNet50
import numpy as np
import os
from tensorflow import keras
from tensorflow.keras import layers,Sequential
#import tensorflow_datasets as tfds
from keras.callbacks import ModelCheckpoint
from keras.applications.resnet50 import ResNet50

# Data preprocessing: load the pre-scaled spatial-AP train/val/test splits.

path = '/Users/victor/code_field/python/footre/spatial_AP_processed_scaled.npz'
path1 = '/Users/victor/code_field/python/footre/spatial_AP_val_processed_scaled.npz'
path2 = '/Users/victor/code_field/python/footre/spatial_AP_test_processed_scaled.npz'


def _read_split(npz_path, x_key, y_key):
    """Open the .npz archive at npz_path and return its (features, labels) pair."""
    with np.load(npz_path) as archive:
        return archive[x_key], archive[y_key]


train_examples, train_labels = _read_split(path, 'x_train', 'y_train')
val_examples, val_labels = _read_split(path1, 'x_val', 'y_val')
test_examples, test_labels = _read_split(path2, 'x_test', 'y_test')

  
train_example_3=[]
val_example_3=[]
test_example_3=[] 

for j in train_examples:
    j=np.array([j for i in range(3)]).transpose(1,2,0)
    train_example_3.append(j)
    
train_example_3=np.asarray(train_example_3)

for j in val_examples:
    j=j=np.array([j for i in range(3)]).transpose(1,2,0)
    val_example_3.append(j)

val_example_3=np.asarray(val_example_3)

for j in test_examples:
    j=j=np.array([j for i in range(3)]).transpose(1,2,0)
    test_example_3.append(j)

test_example_3=np.asarray(test_example_3)

print(train_example_3.shape)
print(val_example_3.shape)  
print("test_shape")  
print(test_example_3[0].shape)
num_classes=2   

# Labels: the model below ends in a single sigmoid unit trained with
# BinaryCrossentropy, so the datasets carry the raw 0/1 integer labels.
# The one-hot encodings are kept only for the (commented-out) categorical
# experiments; the active training path does not use them.
one_hot_labels_train = keras.utils.to_categorical(train_labels, num_classes=2)
one_hot_labels_val = keras.utils.to_categorical(val_labels, num_classes=2)
one_hot_labels_test = keras.utils.to_categorical(test_labels, num_classes=2)

train_dataset = tf.data.Dataset.from_tensor_slices((train_example_3, train_labels))
val_dataset = tf.data.Dataset.from_tensor_slices((val_example_3, val_labels))
test_dataset = tf.data.Dataset.from_tensor_slices((test_example_3, test_labels))

# Shuffle and batch.
BATCH_SIZE = 16
# NOTE(review): SHUFFLE_BUFFER_SIZE is declared but unused — the shuffle
# calls below use 2363/7077, presumably the split sizes, for a full shuffle;
# TODO confirm against the actual dataset lengths.
SHUFFLE_BUFFER_SIZE = 100

train_dataset = train_dataset.shuffle(2363).batch(BATCH_SIZE)
val_dataset = val_dataset.shuffle(7077).batch(BATCH_SIZE)
# FIX: the test split is only passed to evaluate(), whose metrics are
# order-independent, so shuffling it added nothing — batch directly.
test_dataset = test_dataset.batch(BATCH_SIZE)




# Build the model: transfer learning with a frozen ImageNet ResNet50 backbone
# plus a small binary classification head.

resnet50_fine_tune = keras.models.Sequential()
resnet50_fine_tune.add(ResNet50(include_top=False, weights='imagenet',
                                input_shape=train_example_3[0].shape,
                                pooling='avg'))  # avg pooling -> flat feature vector

resnet50_fine_tune.add(tf.keras.layers.Dense(127))
# FIX: this hidden layer previously used a softmax activation, which
# normalises the 127 units against each other and starves the sigmoid head
# of signal; relu is the conventional hidden-layer activation.
resnet50_fine_tune.add(tf.keras.layers.Activation(activation='relu'))
resnet50_fine_tune.add(tf.keras.layers.Dense(1, activation='sigmoid'))  # binary output
resnet50_fine_tune.layers[0].trainable = False  # freeze the pretrained backbone

resnet50_fine_tune.compile(optimizer=tf.keras.optimizers.RMSprop(),
                           loss=tf.keras.losses.BinaryCrossentropy(),
                           metrics=[tf.keras.metrics.BinaryAccuracy()])

print(resnet50_fine_tune.summary())

# FIX: the checkpoint callback was created but never passed to fit(), so no
# best-model checkpoint was ever written; wire it in via callbacks=.
checkpointer = ModelCheckpoint(filepath='model_resnet50_fine_tune.h5', verbose=1,
                               save_best_only=True)
# Note: batch_size is ignored when fitting a tf.data.Dataset (batching is
# already done in the pipeline above), so it is not passed here.
history = resnet50_fine_tune.fit(train_dataset, epochs=1,
                                 validation_data=val_dataset,
                                 callbacks=[checkpointer])

resnet50_fine_tune.save('model_resnet50_fine_tune.h5')

resnet50_fine_tune.evaluate(test_dataset)

#sess=tf.compat.v1.Session()
#tf.compat.v1.disable_eager_execution()
#params=tf.trainable_variables()
#feature=sess.run(resnet50_fine_tune.layers[1])
#print(feature)



# NOTE(review): the triple-quoted block below is dead experimental code (a
# small CNN, apparently from a 2016 paper) kept as a no-op string literal.
# It never executes; consider deleting it and relying on version control.
'''
#16年论文的模型？

model = tf.keras.Sequential([
    tf.keras.layers.Input(shape=(88, 88, 3), dtype="float32"),
    tf.keras.layers.Conv2D(20, (7,7)),
    tf.keras.layers.BatchNormalization(),
    tf.keras.layers.Activation('relu'),
    
    tf.keras.layers.Conv2D(20, (7,7)),
    tf.keras.layers.BatchNormalization(),
    tf.keras.layers.Activation('relu'),
    
    tf.keras.layers.MaxPool2D(pool_size=(4,4)),
    tf.keras.layers.AveragePooling2D(pool_size=(4,4)),
    tf.keras.layers.GlobalAveragePooling2D(),
    tf.keras.layers.Dense(127),
    tf.keras.layers.Dense(1),
    tf.keras.layers.Activation('softmax')
])

model.compile(optimizer=tf.keras.optimizers.RMSprop(),
                loss=tf.keras.losses.CategoricalCrossentropy(),
                metrics=[tf.keras.metrics.CategoricalAccuracy()])

model.fit(train_dataset,batch_size=16, epochs=10,validation_data=val_dataset)

'''

# Feature extraction: reuse the trained network up to the 127-unit Dense
# layer (layers[1]) as an embedding model for the SVM stage below.
# FIX: the unused `temp_out = ...get_layer(index=1).output` was removed.

backbone = tf.keras.Model(inputs=resnet50_fine_tune.input,
                          outputs=resnet50_fine_tune.layers[1].output)

train_feature = backbone.predict(train_example_3)
print("train_feature", train_feature, train_feature.shape)

# Persist the training embeddings for offline experiments.
np.savez("feature_train.npz", x_train=train_feature, y_train=train_labels)

test_feature = backbone.predict(test_example_3)
print("test_feature", test_feature, test_feature.shape)



# Train an RBF-kernel SVM on the standardized deep features and report its
# accuracy on the held-out test embeddings.
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from sklearn.multiclass import OneVsRestClassifier  # imported by the original script; kept
from sklearn.metrics import accuracy_score

model = Pipeline(steps=[
    ("std_scaler", StandardScaler()),
    ("model", SVC(kernel='rbf', C=7, gamma=1)),
])

model.fit(train_feature, train_labels)

y_pred = model.predict(test_feature)
acc = accuracy_score(test_labels, y_pred)
print(acc)




# My own ResNet50 implementation.
# NOTE(review): the triple-quoted block below is dead code kept as a no-op
# string literal — it never executes. If it is ever revived, note that
# "out-layers.Activation('sigmoid')(out)" near the end of get_model looks
# like a typo for "out = layers.Activation(...)"; the subtraction discards
# the activation. Consider deleting this block and relying on version control.

'''
class prepare(layers.Layer):
    
    def __init__(self):
        super(prepare, self).__init__()
        self.conv1=layers.Conv2D(64,(7,7),strides=1,padding="same")  #7*7
        self.bn=layers.BatchNormalization()
        self.Relu=layers.Activation('relu')
        self.mp=layers.MaxPool2D(pool_size=(2,2),strides=2)
            
    def call(self,inputs):
        x=self.conv1(inputs)
        x=self.bn(x)
        x=self.Relu(x)
        x=self.mp(x)
        return x
    def get_config(self):  #在有自定义网络层时，需要保存模型时，重写get_config函数
        config = {}
        base_config = super(prepare, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))

class block(layers.Layer):    
    def __init__(self,filter_num,stride=1,is_first=False):
        super(block,self).__init__()
        self.conv1=layers.Conv2D(filter_num,(1,1),strides=1)
        self.bn1=layers.BatchNormalization()
        
        self.conv2=layers.Conv2D(filter_num,(3,3),strides=stride,padding='same')
        self.bn2=layers.BatchNormalization()
        
        self.conv3=layers.Conv2D(filter_num*4,(1,1),strides=1)
        self.bn3=layers.BatchNormalization()
        
        self.relu=layers.Activation('relu')
        if stride!=1 or is_first==True:
            self.downsample=Sequential()
            self.downsample.add(layers.Conv2D(filter_num*4,(1,1),strides=stride))
        else:
            self.downsample=lambda x:x
    def call(self,inputs):
        x=self.conv1(inputs)
        x=self.bn1(x)
        x=self.relu(x)
        
        x=self.conv2(x)
        x=self.bn2(x)
        x=self.relu(x)
        
        x=self.conv3(x)
        x=self.bn3(x)
        
        identity=self.downsample(inputs)
        output=layers.add([x,identity])
        output=tf.nn.relu(output)
        return output
    def get_config(self):  #在有自定义网络层时，需要保存模型时，重写get_config函数
        config = {}
        base_config = super(block, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
    
def get_model(num_classes):
    input_image = layers.Input(shape=(88, 88, 3), dtype="float32")
    out=prepare()(input_image)
    out=block(64,is_first=True)(out)
    out=block(64)(out)
    out=block(64)(out)
    out=block(128,stride=2)(out)
    out=block(128)(out)
    out=block(128)(out)
    out=block(256,stride=2)(out)
    out=block(256)(out)
    out=block(256)(out)
    out=block(512,stride=2)(out)
    out=block(512)(out)
    out=block(512)(out)
    out=layers.GlobalAveragePooling2D()(out)
    out=layers.Dense(num_classes)(out)
    out-layers.Activation('sigmoid')(out)
    return keras.Model(inputs=input_image, outputs=out)



model=get_model(2)
model.compile(optimizer=tf.keras.optimizers.RMSprop(),
              loss=tf.keras.losses.BinaryCrossentropy(),
                metrics=[tf.keras.metrics.BinaryAccuracy()])

checkpointer = ModelCheckpoint(filepath='model_resnet50.h5', verbose=1, save_best_only=True)
history=model.fit(train_dataset, batch_size=16,epochs=1,validation_data=val_dataset)

model.save('model_resnet50.h5')


model.evaluate(test_dataset)
'''


# Plot the training curves recorded by model.fit().
# FIX: the val_* curves come from validation_data=val_dataset, so they are
# labelled "Validation" — the old "Test" label was misleading.
import matplotlib.pyplot as plt


plt.subplot(211)
plt.title("Accuracy")
# Key names follow the compiled metric: BinaryAccuracy -> 'binary_accuracy'.
plt.plot(history.history['binary_accuracy'], color="g", label="Train")
plt.plot(history.history["val_binary_accuracy"], color="b", label="Validation")
plt.legend(loc="best")


plt.subplot(212)
plt.title("Loss")
plt.plot(history.history["loss"], color="g", label="Train")
plt.plot(history.history["val_loss"], color="b", label="Validation")
plt.legend(loc="best")


plt.tight_layout()
plt.show()