"""
This code is based on the tutorial given on deeplearning.net


References:

    - textbooks: "Pattern Recognition and Machine Learning" -
                 Christopher M. Bishop, section 4.3.2

"""
__docformat__ = 'restructedtext en'

import cPickle
import gzip
import os
import sys
import time

import numpy

import theano
import theano.tensor as T


class LinearRegression(object):
    """Single-output linear regression layer with optional input dropout.

    The prediction is ``y_pred = dot(input, W) + b`` with ``n_out = 1``.
    When ``dropout_rate`` is given, an additional symbolic output
    ``y_pred_dropout`` is built from ``input_dropout`` using a sampled
    binomial mask (training path), while ``y_pred`` uses the standard
    weight-scaling approximation ``W * (1 - dropout_rate)`` (test path).
    """

    def __init__(self, input, input_dropout,
                 n_in, numpy_rng,
                 theano_rng, W=None, b=None, L1rec=None, L2rec=None,
                 dropout_rate=None):
        """Initialize the parameters of the linear regression layer.

        :type input: theano.tensor.TensorType
        :param input: symbolic variable that describes the (clean) input
                      of the architecture (one minibatch)

        :type input_dropout: theano.tensor.TensorType
        :param input_dropout: symbolic input for the dropout path; only
                              consumed when ``dropout_rate`` is not None

        :type n_in: int
        :param n_in: number of input units, the dimension of the space in
                     which the datapoints lie

        :param numpy_rng: numpy RandomState (unused in this class; kept
                          for interface compatibility with callers)
        :param theano_rng: theano RandomStreams used to sample the
                           dropout mask
        :param W: optional shared weight matrix to reuse; when None a
                  zero-initialized (n_in, 1) matrix is created
        :param b: optional shared bias to reuse; when None a
                  zero-initialized length-1 vector is created
        :param L1rec: running L1 penalty from earlier layers; this
                      layer's ``abs(W).sum()`` is added onto it
        :param L2rec: running L2 penalty from earlier layers; this
                      layer's ``(W ** 2).sum()`` is added onto it
        :param dropout_rate: probability of dropping an input unit, or
                             None to disable dropout
        """
        n_out = 1  # single regression target

        # Weights: zero-initialized (n_in, n_out) matrix unless a shared
        # variable is passed in.  Use `is None` rather than `== None`:
        # `==` on a numpy/theano object may build an elementwise or
        # symbolic comparison instead of an identity test.
        if W is None:
            self.W = theano.shared(value=numpy.zeros((n_in, n_out),
                                                     dtype=theano.config.floatX),
                                   name='W', borrow=True)
        else:
            self.W = W

        # Biases: zero-initialized vector of n_out entries.
        if b is None:
            self.b = theano.shared(value=numpy.zeros((n_out,),
                                                     dtype=theano.config.floatX),
                                   name='b', borrow=True)
        else:
            self.b = b

        # NOTE(review): this stores a *fresh* symbolic matrix, not the
        # `input` argument — looks unintentional, but preserved because
        # callers may rely on it.  Verify against call sites.
        self.input = T.matrix('x')

        self.dropout_rate = dropout_rate

        # Build the symbolic prediction(s).
        if dropout_rate is None:
            self.y_pred = T.dot(input, self.W) + self.b
        else:
            # Training path: multiply the dropout input by a binomial
            # mask with keep probability (1 - dropout_rate).
            mask = T.cast(theano_rng.binomial(size=input_dropout.shape,
                                              n=1, p=1 - dropout_rate),
                          dtype=theano.config.floatX)
            dropout_h = T.dot(input_dropout * mask, self.W)
            # Test path: scale the weights by the keep probability
            # instead of sampling a mask.
            h = T.dot(input, self.W * (1 - dropout_rate))
            self.y_pred_dropout = dropout_h + self.b
            self.y_pred = h + self.b

        # parameters of the model
        self.params = [self.W, self.b]

        # Accumulate the L1/L2 regularization terms across stacked layers.
        if L1rec is None:
            self.L1rec = abs(self.W).sum()
        else:
            self.L1rec = L1rec + abs(self.W).sum()

        if L2rec is None:
            self.L2rec = (self.W ** 2).sum()
        else:
            self.L2rec = L2rec + (self.W ** 2).sum()

    def cost(self, y, L1_reg, L2_reg):
        """Return the regularized mean-squared-error cost (symbolic).

        Uses the dropout prediction when dropout is enabled, otherwise
        the plain prediction.

        :param y: symbolic target values
        :param L1_reg: weight applied to the accumulated L1 penalty
        :param L2_reg: weight applied to the accumulated L2 penalty
        """
        if self.dropout_rate is None:
            residual = (self.y_pred - y) ** 2
        else:
            residual = (self.y_pred_dropout - y) ** 2
        return T.mean(residual) + L1_reg * self.L1rec + L2_reg * self.L2rec

    def errors(self, y):
        """Return the mean squared error of the (non-dropout) prediction
        over the minibatch (symbolic).

        :type y: theano.tensor.TensorType
        :param y: target value for each example in the minibatch
        """
        return T.mean((self.y_pred - y) ** 2)
        
