from __future__ import division
import os
import pprint
import scipy.io as sio
import numpy as np
from six.moves import xrange

# Module-level pretty-printer; unused in this chunk — presumably used for
# config/flag dumps elsewhere in the project (TODO confirm before removing).
pp = pprint.PrettyPrinter()

class SeismicData(object):
    '''
    Reader for 3-D seismic volumes stored in MATLAB ".mat" files.

    The volume (shape [xline, inline, time]) is cropped into a regular grid
    of fixed-size sub-cubes, which are then served in mini-batches via
    next_batch().
    Attention: this version can only read 3-D seismic data with shape:
    [xline, inline, time].
    '''
    def __init__(self, file_path, data_name='T', sample_shape=[32, 32, 128], batch_size=64):
        '''
        Args:
            file_path:    path of the ".mat" file WITHOUT the extension.
            data_name:    variable name of the volume inside the ".mat" file.
            sample_shape: crop size per sample, 1D list: [height, width, depth].
            batch_size:   number of samples per training batch.
        '''
        self.file_path = file_path + '.mat'
        self.data_name = data_name
        # Copy so the shared mutable default list is never aliased/mutated.
        self.sample_shape = list(sample_shape)
        self.batch_size = batch_size
        self.batch_idx = 0                # index of the next batch served by next_batch()

        # Cropped samples, shape: [nsamples] + sample_shape.
        self.batch_samples = self.load()

    def load(self):
        '''
        Load the ".mat" file of 3-D seismic data (size x * y * time) and crop
        it into a grid of samples of shape `sample_shape`.

        Returns:
            ndarray of shape [nsamples] + sample_shape holding every crop.

        Raises:
            IOError:    if the ".mat" file does not exist.
            ValueError: if the volume is not 3-D or is smaller than
                        `sample_shape` along any axis.
        '''
        # Fail loudly on a missing file. (The previous version only printed a
        # message and then crashed later with a NameError on `data`.)
        if not os.path.exists(self.file_path):
            raise IOError(' [!] load dataset "%s" failed.....' % self.file_path)
        data = sio.loadmat(self.file_path)[self.data_name]
        print(' [!] load dataset "%s" success.....' % self.file_path)

        # Validate with a real exception instead of `assert` (stripped by -O).
        # A dimension exactly equal to sample_shape is allowed (one slice).
        if len(data.shape) != 3 or any(data.shape[i] < self.sample_shape[i] for i in range(3)):
            raise ValueError(
                'data must be 3-D and >= sample_shape along every axis; '
                'got %s vs %s' % (data.shape, self.sample_shape))

        # Number of crops per axis (rounded, so the grid may slightly overlap
        # or clip at the far edge; origins are clamped below to stay in-bounds).
        self.slice_cnt_x = np.int32(np.round(float(data.shape[0]) / self.sample_shape[0]))
        self.slice_cnt_y = np.int32(np.round(float(data.shape[1]) / self.sample_shape[1]))
        self.slice_cnt_z = np.int32(np.round(float(data.shape[2]) / self.sample_shape[2]))

        nsamples = np.int32(self.slice_cnt_x * self.slice_cnt_y * self.slice_cnt_z)
        croped_data = np.zeros([nsamples] + self.sample_shape)
        print(' [*] x_cnt:%d, y_cnt:%d, z_cnt:%d, sample_number / batch_size = %f........'
              % (self.slice_cnt_x, self.slice_cnt_y, self.slice_cnt_z, nsamples / self.batch_size))
        self.batch_cnt = nsamples // self.batch_size  # how many full batches are available

        self.x_step = self.__calc_step(data.shape[0], self.slice_cnt_x)
        self.y_step = self.__calc_step(data.shape[1], self.slice_cnt_y)
        self.z_step = self.__calc_step(data.shape[2], self.slice_cnt_z)

        # Clamp each crop origin so the crop always fits inside the volume.
        # This both fixes the old off-by-one (`range(slice_cnt_z - 1)` left the
        # entire last z-layer of croped_data all-zero) and guards against the
        # rounded slice counts pushing the last crop past the array edge.
        x_max = data.shape[0] - self.sample_shape[0]
        y_max = data.shape[1] - self.sample_shape[1]
        z_max = data.shape[2] - self.sample_shape[2]

        idx = 0
        for t in range(self.slice_cnt_z):
            z_beg = min(int(t * self.z_step), z_max)
            z_end = z_beg + self.sample_shape[2]

            for r in range(self.slice_cnt_x):
                x_beg = min(int(r * self.x_step), x_max)
                x_end = x_beg + self.sample_shape[0]

                for c in range(self.slice_cnt_y):
                    y_beg = min(int(c * self.y_step), y_max)
                    y_end = y_beg + self.sample_shape[1]

                    croped_data[idx] = data[x_beg:x_end, y_beg:y_end, z_beg:z_end]
                    idx += 1

        return croped_data

    def reconstruct(self, data):
        '''Reassemble cropped samples back into a volume. Not implemented yet.'''
        pass

    def next_batch(self):
        '''
        Serve the next mini-batch of samples.

        Returns:
            ndarray of shape [batch_size] + sample_shape + [1] (trailing
            channel axis added for the network), or None once all
            `batch_cnt` batches have been consumed.
        '''
        if self.batch_idx >= self.batch_cnt:
            return None
        beg = self.batch_idx * self.batch_size
        self.batch_idx += 1
        return np.reshape(self.batch_samples[beg:beg + self.batch_size],
                          [self.batch_size] + self.sample_shape + [1])

    def reset_batch_status(self):
        '''Rewind the batch cursor so iteration restarts from the first batch.'''
        self.batch_idx = 0

    def __calc_step(self, length, sample_cnt):
        '''Stride between consecutive crop origins along one axis (0 if no slices).'''
        return length // sample_cnt if sample_cnt > 0 else 0
