# Copyright 2013    Yajie Miao    Carnegie Mellon University

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
# WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABILITY OR NON-INFRINGEMENT.
# See the Apache 2 License for the specific language governing permissions and
# limitations under the License.

# import cPickle
# import gzip
import os
import sys
import time

import numpy

import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams


class HiddenLayer_Str(object):
    """Hidden layer with a structured (two-stream) input.

    The incoming minibatch is split column-wise into a main feature part
    (the first ``n_in - ivecDim`` columns) and an i-vector part (the last
    ``ivecDim`` columns).  Two affine streams are computed:

        stream 1: activation(input1 . W1 + input2 . W2 + b1)  -> n_out units
        stream 2: activation((input2 + s) . W3 + b2)          -> hivecDim units

    where ``s`` is a trainable additive shift on the i-vector (its update
    lives in ``delta_s``).  The layer output is the column-wise
    concatenation of both streams, so the effective output dimension is
    ``n_out + hivecDim``.
    """

    def __init__(self, rng, input, n_in, n_out, W=None, b=None,
                 activation=T.tanh, do_maxout=False, pool_size=1,
                 do_pnorm=False, pnorm_order=1, ivecDim=25, hivecDim=25,
                 W1=None, W2=None, W3=None, b1=None, b2=None,
                 warm=False, adapt=False):
        """Build the layer's shared parameters and symbolic output.

        :param rng: numpy.random.RandomState used for weight initialization.
        :param input: symbolic minibatch of shape (batch, n_in); the last
            ``ivecDim`` columns are treated as the i-vector.
        :param n_in: total input dimension (features + i-vector).
        :param n_out: number of units in the main (feature) stream.
        :param W, b: unused here; kept for signature compatibility with
            HiddenLayer.
        :param activation: elementwise nonlinearity applied to both streams.
        :param do_maxout, pool_size, do_pnorm, pnorm_order: unused here;
            kept for signature compatibility with HiddenLayer.
        :param ivecDim: width of the i-vector slice of the input.
        :param hivecDim: number of units in the i-vector stream.
        :param W1, W2, W3, b1, b2: optional pre-trained shared parameters;
            freshly initialized when None.
        :param warm, adapt: accepted for API compatibility; currently unused.
        """
        self.input = input
        self.n_in = n_in
        self.n_out = n_out

        # Split dimensions: stream 1 consumes the main features,
        # stream 2 consumes the i-vector.
        n_in1 = n_in - ivecDim
        n_in2 = ivecDim
        n_out1 = n_out
        n_out2 = hivecDim

        self.n_in1 = n_in1
        self.n_in2 = n_in2
        self.n_out1 = n_out1
        self.n_out2 = n_out2

        input1 = input[:, :n_in1]
        input2 = input[:, n_in1:]
        self.input1 = input1
        self.input2 = input2

        def _init_weight(fan_in, fan_out):
            # Glorot-style uniform initialization; sigmoid layers get the
            # conventional 4x scaling.
            values = numpy.asarray(rng.uniform(
                    low=-numpy.sqrt(6. / (fan_in + fan_out)),
                    high=numpy.sqrt(6. / (fan_in + fan_out)),
                    size=(fan_in, fan_out)), dtype=theano.config.floatX)
            if activation == theano.tensor.nnet.sigmoid:
                values *= 4
            return values

        # BUG FIX: the shared variables were previously rebuilt from
        # W*_values unconditionally, which raised NameError whenever a
        # pre-trained W1/W2/W3 was passed in (and would have discarded it
        # otherwise).  Provided parameters are now reused as-is, matching
        # how b1/b2 are handled.
        if W1 is None:
            W1 = theano.shared(value=_init_weight(n_in1, n_out1),
                               name='W1', borrow=True)
        if W2 is None:
            W2 = theano.shared(value=_init_weight(n_in2, n_out1),
                               name='W2', borrow=True)
        if W3 is None:
            W3 = theano.shared(value=_init_weight(n_in2, n_out2),
                               name='W3', borrow=True)

        if b1 is None:
            b1_values = numpy.zeros((n_out1,), dtype=theano.config.floatX)
            b1 = theano.shared(value=b1_values, name='b1', borrow=True)

        if b2 is None:
            b2_values = numpy.zeros((n_out2,), dtype=theano.config.floatX)
            b2 = theano.shared(value=b2_values, name='b2', borrow=True)

        # Trainable shift applied to the i-vector before stream 2
        # (the "delta_s" adaptation term); starts at zero.
        s_values = numpy.zeros((n_in2,), dtype=theano.config.floatX)
        s = theano.shared(value=s_values, name='s_values', borrow=True)

        self.W1 = W1
        self.W2 = W2
        self.W3 = W3
        self.b1 = b1
        self.b2 = b2
        self.s = s

        # Momentum/update buffers, one per trainable parameter.
        self.delta_W1 = theano.shared(value=numpy.zeros((n_in1, n_out1),
                                      dtype=theano.config.floatX), name='delta_W1')
        self.delta_W2 = theano.shared(value=numpy.zeros((n_in2, n_out1),
                                      dtype=theano.config.floatX), name='delta_W2')
        self.delta_W3 = theano.shared(value=numpy.zeros((n_in2, n_out2),
                                      dtype=theano.config.floatX), name='delta_W3')
        self.delta_b1 = theano.shared(value=numpy.zeros_like(self.b1.get_value(borrow=True),
                                      dtype=theano.config.floatX), name='delta_b1')
        self.delta_b2 = theano.shared(value=numpy.zeros_like(self.b2.get_value(borrow=True),
                                      dtype=theano.config.floatX), name='delta_b2')
        self.delta_s = theano.shared(value=numpy.zeros_like(self.s.get_value(borrow=True),
                                     dtype=theano.config.floatX), name='delta_s')

        # Stream 1 mixes both input parts; stream 2 sees the shifted i-vector.
        lin_output1 = T.dot(input1, self.W1) + T.dot(input2, self.W2) + self.b1
        lin_output2 = T.dot((input2 + s), self.W3) + self.b2

        # Concatenate both activated streams; output width is n_out + hivecDim.
        self.output = T.concatenate([activation(lin_output1),
                                     activation(lin_output2)], axis=1)

        self.params = [self.W1, self.W2, self.W3, self.b1, self.b2, self.s]
        self.delta_params = [self.delta_W1, self.delta_W2, self.delta_W3,
                             self.delta_b1, self.delta_b2, self.delta_s]

class HiddenLayer(object):
    """Fully-connected hidden layer: output = activation(input . W + b).

    Optionally applies maxout or p-norm pooling over groups of
    ``pool_size`` consecutive pre-activation units before the
    nonlinearity.
    """

    def __init__(self, rng, input, n_in, n_out, W=None, b=None,
                 activation=T.tanh, do_maxout=False, pool_size=1,
                 do_pnorm=False, pnorm_order=1):
        """Build shared parameters and the symbolic layer output.

        :param rng: numpy.random.RandomState for weight initialization.
        :param input: symbolic minibatch of shape (batch, n_in).
        :param n_in: input dimension.
        :param n_out: number of pre-activation units.
        :param W, b: optional pre-trained shared parameters; freshly
            initialized when None.
        :param activation: nonlinearity; None means linear output.
        :param do_maxout: max-pool groups of ``pool_size`` units.
        :param do_pnorm: p-norm-pool groups of ``pool_size`` units with
            order ``pnorm_order``.
        """
        self.input = input
        self.n_in = n_in
        self.n_out = n_out

        if W is None:
            # Glorot-style uniform initialization; sigmoid gets 4x scale.
            bound = numpy.sqrt(6. / (n_in + n_out))
            W_init = numpy.asarray(rng.uniform(low=-bound, high=bound,
                                               size=(n_in, n_out)),
                                   dtype=theano.config.floatX)
            if activation == theano.tensor.nnet.sigmoid:
                W_init *= 4
            W = theano.shared(value=W_init, name='W', borrow=True)

        if b is None:
            b = theano.shared(value=numpy.zeros((n_out,),
                                                dtype=theano.config.floatX),
                              name='b', borrow=True)

        self.W = W
        self.b = b

        # Momentum/update buffers matching W and b.
        self.delta_W = theano.shared(value=numpy.zeros((n_in, n_out),
                                     dtype=theano.config.floatX),
                                     name='delta_W')
        self.delta_b = theano.shared(value=numpy.zeros_like(self.b.get_value(borrow=True),
                                     dtype=theano.config.floatX),
                                     name='delta_b')

        pre_activation = T.dot(input, self.W) + self.b

        if do_maxout == True:
            # Max over each group of pool_size consecutive units.
            self.last_start = n_out - pool_size
            pooled = pre_activation[:, 0:self.last_start + 1:pool_size]
            for offset in range(1, pool_size):
                candidate = pre_activation[:, offset:self.last_start + offset + 1:pool_size]
                pooled = T.maximum(candidate, pooled)
            self.tmp_output = pooled
            self.output = activation(self.tmp_output)
        elif do_pnorm == True:
            # p-norm over each group: (sum |x_i|^p) ** (1/p).
            self.last_start = n_out - pool_size
            accum = abs(pre_activation[:, 0:self.last_start + 1:pool_size]) ** pnorm_order
            for offset in range(1, pool_size):
                term = abs(pre_activation[:, offset:self.last_start + offset + 1:pool_size]) ** pnorm_order
                accum = accum + term
            self.tmp_output = accum ** (1.0 / pnorm_order)
            self.output = activation(self.tmp_output)
        else:
            self.output = (pre_activation if activation is None
                           else activation(pre_activation))

        # Parameters of the model and their update buffers.
        self.params = [self.W, self.b]
        self.delta_params = [self.delta_W, self.delta_b]

        self.adapt_params = [self.W]
        self.adapt_delta_params = [self.delta_W]

def _dropout_from_layer(theano_rng, hid_out, p):
    """Apply dropout to *hid_out*, where *p* is the drop probability."""
    # The binomial mask draws 1 with probability (1 - p), i.e. the
    # probability of KEEPING a unit; dropped units are zeroed.
    keep_prob = 1 - p
    mask = theano_rng.binomial(n=1, p=keep_prob, size=hid_out.shape,
                               dtype=theano.config.floatX)
    return mask * hid_out

class DropoutHiddenLayer(HiddenLayer):
    """HiddenLayer whose output is additionally passed through dropout.

    Exposes both ``self.output`` (clean, inherited) and
    ``self.dropout_output`` (masked) so train/test graphs can pick one.
    """

    def __init__(self, rng, input, n_in, n_out,
                 W=None, b=None, activation=T.tanh, do_maxout=False,
                 pool_size=1, dropout_factor=0.5):
        # Build the underlying dense layer first.
        super(DropoutHiddenLayer, self).__init__(
                rng=rng, input=input, n_in=n_in, n_out=n_out, W=W, b=b,
                activation=activation, do_maxout=do_maxout,
                pool_size=pool_size)

        # Per-layer symbolic RNG, seeded from the shared numpy rng.
        self.theano_rng = RandomStreams(rng.randint(2 ** 30))

        self.dropout_output = _dropout_from_layer(theano_rng=self.theano_rng,
                                                  hid_out=self.output,
                                                  p=dropout_factor)

