'''
    Store the correspondence between the base image data set and its extracted
    CNN features in an HDF5 (.h5) file.
'''

import os

import h5py
import torch

from src.CNNModel.Net.VGG16 import SE_VGG
import matplotlib.image as  mpimg
import matplotlib.pyplot as plt
import LoadImage as lim
import numpy as np
import torchvision.models as models

# Get the path of all pictures
def get_imlist(path, extensions=('.jpg',)):
    """Return full paths of all files in *path* whose name ends with one of
    *extensions* (case-sensitive; defaults to '.jpg' only).

    :param path: directory to scan (not recursive)
    :param extensions: tuple of filename suffixes to accept
    :return: list of joined paths, in os.listdir order (unspecified)
    """
    suffixes = tuple(extensions)  # endswith accepts a tuple of options
    return [os.path.join(path, f) for f in os.listdir(path) if f.endswith(suffixes)]

if __name__ == '__main__':

    features = []  # one 1-D feature vector (numpy array) per image
    paths = []     # image path corresponding to each feature row

    datapath = 'DB/pictures'  # directory holding the image database
    output = 'FeaturesAndPaths/vgg_featureCNN.h5'  # destination HDF5 file
    img_list = get_imlist(datapath)  # collect all .jpg paths

    print('------------------------------------------\n'
          '       feature extraction starts          \n'
          '------------------------------------------')

    # model = SE_VGG(50) # (*,3,224,224)
    model = models.vgg16(pretrained=False)
    pthfile = 'Model/state/vgg16-397923af.pth'  # locally stored pre-trained VGG16 weights
    model.load_state_dict(torch.load(pthfile))
    model.eval()  # inference mode: fixes dropout/batch-norm behaviour

    # no_grad: inference only — avoids building an autograd graph per image
    # (the original kept graphs alive until .detach(), wasting memory)
    with torch.no_grad():
        for image_path in img_list:
            # Process images one at a time (a DataLoader would batch this better).
            img = lim.MyLoader(image_path)     # load image from disk
            img = lim.transform(img)           # preprocessing / augmentation
            img = img.view(1, 3, 224, 224)     # add batch dimension -> (N, C, H, W)
            feat = model(img)                  # forward pass through the network
            feat = feat.view(feat.size(1))     # drop the batch dimension -> 1-D vector
            # NOTE(review): despite the original name "norm_feat", no L2
            # normalisation is applied here — confirm whether retrieval
            # downstream expects normalised features.
            features.append(feat.numpy())      # already grad-free under no_grad
            paths.append(image_path)
            print(f'-------- Path : {image_path} 处理完毕-------')

    features = np.array(features)
    print(features, '\n', paths)

    print('------------------------------------------\n'
          '   writing feature extractions results    \n'
          '------------------------------------------')

    # Make sure the destination directory exists before writing.
    os.makedirs(os.path.dirname(output), exist_ok=True)

    # Context manager guarantees the file is closed even on error.
    # dtype='S' stores paths as fixed-width byte strings, matching the old
    # np.string_(paths) behaviour (np.string_ was removed in NumPy 2.0).
    with h5py.File(output, 'w') as h5f:
        h5f.create_dataset('features', data=features)
        h5f.create_dataset('paths', data=np.asarray(paths, dtype='S'))

    print('------------------------------------------\n'
          '           writing successfully           \n'
          '------------------------------------------')



