# -*- coding: utf-8 -*-
import sys

import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from matplotlib.font_manager import *

import dnn_net
import pre_handle_data as p_data
from dnn_train import PATH_MODEL_SAVE
plt.rcParams['font.sans-serif']=['simhei']
sys.setdefaultencoding('utf8')
matplotlib.rcParams['axes.unicode_minus'] = False
trace_num=400
num_point=200
slice_size=31
side_add_point_l=15
side_add_point_r=15


#please change these parmeters to predict the seismic data you wanted
file_name='data/test_data_200.bin'
str_1='FFID200'

view_1=1
view_2=200

fb_by_dnn=np.zeros(trace_num)
data_all_trace=p_data.get_test_trace(file_name)
first_breaking_t=0

feed_data_=tf.placeholder(tf.float32,[None,dnn_net.INPUT_NODE],name="feed_data")
output=dnn_net.get_dnn_net(feed_data_)
init=tf.global_variables_initializer()
saver=tf.train.Saver()
output_all=np.zeros((200,400))
with tf.Session() as sess:
    sess.run(init)
    ckpt=tf.train.get_checkpoint_state(PATH_MODEL_SAVE)
    if ckpt and ckpt.model_checkpoint_path:
        saver.restore(sess,ckpt.model_checkpoint_path)
        global_step=ckpt.model_checkpoint_path.split('/')[-1].split('-')[-1]
        arr_request = np.zeros((num_point, slice_size), dtype=float)
        for which_trace in range(trace_num):
            data_trace = data_all_trace[which_trace, :]
            add_data1 = np.zeros(side_add_point_l, dtype=float)
            add_data1[:]=data_trace[0]
            add_data2 = np.zeros(side_add_point_r, dtype=float)
            add_data2[:] = data_trace[-1]
            data_new = np.append(add_data1,data_trace)
            data_new = np.append(data_new,add_data2)
            for i in range(num_point):
                arr_request[i, :] = data_new[i:i + slice_size]
            output_=sess.run(output,feed_dict={feed_data_:arr_request})
            output_=np.array(output_)
            output_all[:,which_trace]=output_[:,0]
            temp = 0
            for i in range(len(output_)):
                if  output_[i]>temp:
                    first_breaking_t=i+1
                    temp=output_[i]
            fb_by_dnn[which_trace]=first_breaking_t
            print first_breaking_t
point=np.zeros((1,200))
for i in range(200):
    point[0,i]=i+1
plt.hold
plt.subplot(1,2,1)
plt.plot(data_all_trace[view_1-1,:],point[0,:])
plt.plot(output_all[:,view_1-1].T,point[0,:],'r')
plt.legend(('归一化数据','概率'))
plt.scatter(0,fb_by_dnn[view_1-1]-1)
plt.xlim(-0.1,1)
plt.ylim(1,200)
ax=plt.gca()
ax.invert_yaxis()
plt.title('DNN神经网络对'+str_1+'\n'+'第'+str(view_1)+'道'+'的拾取结果')
plt.ylabel('采样点号')
plt.xlabel('相对振幅/概率')
plt.subplot(1,2,2)
plt.plot(data_all_trace[view_2-1,:],point[0,:])
plt.plot(output_all[:,view_2-1].T,point[0,:],'r')
plt.legend(('归一化数据','概率'))
plt.scatter(0,fb_by_dnn[view_2-1]-1)
plt.xlim(-0.1,1)
plt.ylim(1,200)
ax=plt.gca()
ax.invert_yaxis()
plt.title('DNN神经网络对'+str_1+'\n'+'第'+str(view_2)+'道'+'的拾取结果')
plt.ylabel('采样点号')
plt.xlabel('相对振幅/概率')
plt.show()
with open('fb_dnn_'+str_1,'wb') as f:
    f.write(fb_by_dnn)
