# library
import json
import sys
import os
import tensorflow as tf
from tensorflow import keras
from keras import regularizers

#sys.path.append('icdsweb')

from info.modules.api.neuralnet import shareglob
from info.modules.api.neuralnet.shareglob import *
from info.modules.api.neuralnet import craft_loader_out5
from info.modules.api.neuralnet.craft_all_five import bone, x1one, x2one,x3one, x4one, x5one, x6one, x7one, x8one, x9one, x10one, x11one, x12one, y1one, y2one, y3one, y4one, y5one

# Select the pre-processed data module matching the textile count
# (`textilenum` comes from the `shareglob` star import above).
# NOTE(review): the elif chain is first-match-wins, so the overlap between
# the last two branches — (>35, <=40.5) vs (>40, <=59.5) — resolves to the
# 35-40 module for textilenum in (40, 40.5].
# NOTE(review): a textilenum outside [9, 59.5] matches no branch, leaving
# `craft_out5_data` undefined (NameError on first use) — verify callers
# guarantee the range before importing this module.
print('textilenum in craft_keras*.py file = ', textilenum)

if(textilenum>=9. and textilenum<=11.):
   import info.modules.api.neuralnet.craft_out5_10 as craft_out5_data
elif(textilenum>11. and textilenum<=15.):
   import info.modules.api.neuralnet.craft_out5_10_15 as craft_out5_data
elif(textilenum>15. and textilenum<=19.5):
   import info.modules.api.neuralnet.craft_out5_15_20 as craft_out5_data
elif(textilenum>19.5 and textilenum<=24.):
   import info.modules.api.neuralnet.craft_out5_20_24 as craft_out5_data
elif(textilenum>24. and textilenum<=29.5):
   import info.modules.api.neuralnet.craft_out5_25_30 as craft_out5_data
elif(textilenum>29.5 and textilenum<=35.):
   import info.modules.api.neuralnet.craft_out5_30_35 as craft_out5_data
elif(textilenum>35. and textilenum<=40.5):
   import info.modules.api.neuralnet.craft_out5_35_40 as craft_out5_data
elif(textilenum>40. and textilenum<=59.5):
   import info.modules.api.neuralnet.craft_out5_40 as craft_out5_data
#from polls.prediction import craft_loader_all_five
#from polls.prediction import craft_all_five

import matplotlib.pyplot as plt
import numpy as np
import random


def main():
   """Entry point: train the craft neural network and save its weights."""
   run_network()
                       
def multilayer_network():
    """Build the fully-connected regression model.

    Architecture: an input layer of width ``n_input``, six sigmoid hidden
    layers (``n_hidden1`` .. ``n_hidden6``), each with L2 weight
    regularization of strength ``eta``, and a linear output layer of width
    ``n_output``.  All sizes come from module-level globals (``shareglob``).

    Returns:
        An uncompiled ``keras.models.Sequential`` model.
    """
    model = keras.models.Sequential()

    # First hidden layer also declares the input shape.
    model.add(keras.layers.Dense(
        units=n_hidden1,
        input_shape=(n_input,),
        kernel_regularizer=regularizers.l2(eta),
        activation='sigmoid'))

    # Remaining hidden layers are identical apart from their widths.
    for width in (n_hidden2, n_hidden3, n_hidden4, n_hidden5, n_hidden6):
        model.add(keras.layers.Dense(
            width,
            kernel_regularizer=regularizers.l2(eta),
            activation='sigmoid'))

    # Linear output head (regression — no activation).
    model.add(keras.layers.Dense(n_output, activation=None))

    return model

def run_network():
   """Train the multilayer regression network and save its weights.

   Loads the craft data set via ``craft_loader_out5``, trains the Keras
   model from :func:`multilayer_network` with ``fit_generator`` for 1000
   epochs plus one extra logged ``train_on_batch`` pass, then saves the
   weights to an HDF5 file whose name is chosen by the module-level
   ``textilenum``.

   Relies on globals star-imported from ``shareglob`` (``n_input``,
   ``n_output``, ``batch_size``, ``display_step``, ``textilenum``).

   Raises:
       ValueError: if ``textilenum`` falls outside every supported bucket
           (previously this fell through and crashed with a NameError on
           ``model.save``).
   """

   # Build everything inside an explicit TF1 graph so repeated calls do not
   # pollute the process-wide default graph.
   g1 = tf.Graph()
   with g1.as_default():
      # NOTE(review): these placeholders are never fed anywhere below —
      # they only record the expected tensor shapes; kept for parity with
      # the original graph construction.
      x = tf.placeholder(tf.float32, [None, n_input])
      y = tf.placeholder(tf.float32, [None, n_output])

      datar = 1
      if datar == 1:
         # data source
         training_data, vali_data, test_data = craft_loader_out5.load_data_wrapper()

      print('ok for now in nn.')

      model = multilayer_network()
      model.compile(loss='mse', optimizer='adam')
      print('ok for now in cost')

      # Initialize the variables (TF1-era API; deprecated alias of
      # global_variables_initializer, kept for compatibility).
      init = tf.initialize_all_variables()

   # Let the GPU allocator grow on demand instead of grabbing all memory.
   config = tf.ConfigProto()
   config.gpu_options.allow_growth = True
   with tf.Session(config=config, graph=g1) as sess:
      sess.run(init)

      tocost = np.array([])  # per-epoch average training cost history

      def generator(training_data, b_size):
         """Yield shuffled training mini-batches forever.

         The last two batches of each shuffle are skipped (they mirror the
         slice that ``vali_generator`` serves from the validation split).
         """
         # Fixed: the original computed/sliced with the global
         # ``batch_size`` and ignored the ``b_size`` parameter entirely
         # (same value at the call site, so behavior is unchanged).
         total_batch = int(len(training_data) / b_size)
         random.shuffle(training_data)

         while True:
            # Loop over all batches except the two held out
            for i in range(total_batch - 2):
               batch_x = [x for x, y in training_data[i * b_size:(i + 1) * b_size]]
               batch_y = [y for x, y in training_data[i * b_size:(i + 1) * b_size]]
               yield (np.array(batch_x), np.array(batch_y))

      def vali_generator(vali_data, b_size):
         """Yield the last two mini-batches of the validation split forever."""
         total_batch = int(len(vali_data) / b_size)
         random.shuffle(vali_data)

         while True:
            for i in range(total_batch - 2, total_batch):
               batch_x = [x for x, y in vali_data[i * b_size:(i + 1) * b_size]]
               batch_y = [y for x, y in vali_data[i * b_size:(i + 1) * b_size]]
               yield (np.array(batch_x), np.array(batch_y))

      model.fit_generator(generator(training_data, batch_size),
                          validation_data=vali_generator(vali_data, batch_size),
                          validation_steps=30,
                          steps_per_epoch=np.ceil(len(training_data) / batch_size),
                          epochs=1000)

      # One extra manual pass with train_on_batch so the average cost can be
      # accumulated and logged explicitly.
      for epoch in range(1):
         avg_cost = 0.
         total_batch = int(len(training_data) / batch_size)
         random.shuffle(training_data)
         # Loop over all batches
         for i in range(total_batch):
            batch_x = [x for x, y in training_data[i * batch_size:(i + 1) * batch_size]]
            batch_y = [y for x, y in training_data[i * batch_size:(i + 1) * batch_size]]
            # np.array conversion makes train_on_batch work on both GPU and CPU
            c = model.train_on_batch(np.array(batch_x), np.array(batch_y))
            # Compute average loss
            avg_cost += c / total_batch
         tocost = np.append(tocost, avg_cost)
         # Display logs per epoch step
         if epoch % display_step == 0:
            print("Epoch:", '%04d' % (epoch + 1), "cost=",
                  "{:.9f}".format(avg_cost))

      # Pick the weights file for this textilenum bucket.  The 35-40 upper
      # bound is 40.5 to stay consistent with the data-module selection at
      # import time (was 40.0, which mis-filed textilenum in (40, 40.5]
      # into the generic 40+ file while training on the 35-40 data).
      if textilenum >= 9. and textilenum <= 11.:
         filepath = 'weights-biases-out5-10.hdf5'
      elif textilenum > 11. and textilenum <= 15.:
         filepath = 'weights-biases-out5-10-15.hdf5'
      elif textilenum > 15. and textilenum <= 19.5:
         filepath = 'weights-biases-out5-15-20.hdf5'
      elif textilenum > 19.5 and textilenum <= 24.:
         filepath = 'weights-biases-out5-20-24.hdf5'
      elif textilenum > 24. and textilenum <= 29.5:
         filepath = 'weights-biases-out5-25-30.hdf5'
      elif textilenum > 29.5 and textilenum <= 35.:
         filepath = 'weights-biases-out5-30-35.hdf5'
      elif textilenum > 35. and textilenum <= 40.5:
         filepath = 'weights-biases-out5-35-40.hdf5'
      elif textilenum > 40.5 and textilenum <= 59.5:
         filepath = 'weights-biases-40.hdf5'
      else:
         # Previously fell through here and crashed with NameError on save.
         raise ValueError(
            'textilenum=%s is outside the supported range [9, 59.5]' % textilenum)

      model.save(filepath)


# Script entry point: train the network and save weights when run directly.
if __name__ == "__main__":
	 main()
