#!/usr/bin/env python
# -*- coding: utf-8 -*-
import timeit
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import BayesianRidge
from sklearn.tree import DecisionTreeRegressor
from sklearn import cross_validation
from sklearn import metrics
from sknn.mlp import Regressor, Layer
from sklearn import preprocessing
import numpy as np
from scipy.stats.stats import pearsonr

from sklearn import tree
from sklearn.externals.six import StringIO  

def load_data(input_file, X_size, y_index, test_size):
    """Load a tab-separated numeric data file and split it into train/test.

    The first line of the file is treated as a header and skipped.
    Columns 1..X_size-1 of each row become the features (column 0 looks
    like a date key -- TODO confirm) and column y_index is the target.

    Args:
        input_file: path to a tab-separated file of numeric columns.
        X_size: exclusive upper bound of the feature column slice.
        y_index: index of the target column.
        test_size: split point; a negative value keeps the last
            -test_size rows as the test set.

    Returns:
        (X, y, X_train, y_train, X_test, y_test) where X is the scaled
        feature matrix (numpy array) and y is the raw target list.
    """
    X = []
    y = []

    with open(input_file, 'r') as f:
        f.readline()  # skip the header line
        for line in f:
            arr = [float(i) for i in line.strip().split('\t')]
            X.append(arr[1:X_size])
            y.append(arr[y_index])
    # NOTE: the explicit f.close() that used to follow was redundant --
    # the with-statement already closes the file.
    print(X[0])
    print('len(X[]): %d, len(y[]): %d' % (len(X), len(y)))

    # Standardize features to zero mean / unit variance.
    X = preprocessing.scale(np.array(X))
    print(X[0:2])

    X_train = X[:test_size]
    y_train = y[:test_size]
    X_test = X[test_size:]
    y_test = y[test_size:]
    print('len(X_train[]): %d, len(y_train[]): %d' % (len(X_train), len(y_train)))
    print('len(X_test[]): %d, len(y_test[]): %d' % (len(X_test), len(y_test)))
    print('')

    return X, y, X_train, y_train, X_test, y_test

def shrink_num(num):
    """Scale num below magnitude 1 by dividing by the next power of ten.

    The divisor is 10 ** len(str(num)), so e.g. 36 -> 0.36 and 100 -> 0.1.
    Note the length is taken from the raw string form of num, so a minus
    sign or decimal point counts toward the divisor as well.
    """
    divisor = 10 ** len(str(num))
    return float(num) / divisor
    
def format_for_neural(X, X_train, X_test):
    """Apply shrink_num element-wise to the three feature matrices.

    Returns new shrunken copies of (X, X_train, X_test) as lists of
    lists, leaving the inputs untouched.
    """
    def _shrink_rows(rows):
        # One shrunken list per input row.
        return [[shrink_num(value) for value in row] for row in rows]

    return _shrink_rows(X), _shrink_rows(X_train), _shrink_rows(X_test)

def get_score(predictions, y_test):
    """Print evaluation metrics comparing predictions against y_test.

    Reports per-sample relative-deviation statistics (via get_deviation),
    the Pearson correlation coefficient, the mean absolute error and the
    R^2 score.

    NOTE(review): a large amount of commented-out experiment code (weekly
    aggregation, extra metrics) and a dead score/size accumulation whose
    only consumers were commented-out prints were removed here.
    """
    get_deviation(predictions, y_test)
    print('Corelation Coefficient: %.5f' %
            (pearsonr(predictions, y_test)[0]))
    print('Mean absolute error: %.5f' %
            (metrics.mean_absolute_error(y_test, predictions)))
    print('R² score, the coefficient of determination: %.5f' %
            (metrics.r2_score(y_test, predictions)))
    
def get_deviation(predictions, y_test):
    """Print mean and variance of the relative prediction deviation.

    For every sample with a non-zero target, |target - prediction| / target
    is collected.  Samples whose target is zero are skipped entirely.

    Bug fix: the append used to sit outside the zero-check, so a zero
    target appended a stale value from the previous iteration -- or
    raised NameError when the very first target was zero.
    """
    deviations = []
    for prediction, target in zip(predictions, y_test):
        if target != 0:
            deviations.append(abs(target - prediction) / target)
    print('Prediction deviation:')
    print('mean: %.5f, variance: %.5f' % (np.mean(deviations), np.var(deviations)))
    

def do_mlr(X_train, y_train, X_test, y_test):
    """Multiple linear regression: fit, report cross-validation, score."""
    model = LinearRegression()
    model.fit(X_train, y_train)
    predictions = model.predict(X_test)

    print('cross_validation: ')
    print(cross_validation.cross_val_score(model, X_train, y_train, cv=10))
    get_score(predictions, y_test)

def do_pr(X_train, y_train, X_test, y_test, p_degree=4):
    """Polynomial regression of degree p_degree.

    Expands features with PolynomialFeatures, fits a linear model on the
    expanded training set, then prints evaluation metrics and a 10-fold
    cross-validation score.
    """
    featurizer = PolynomialFeatures(degree=p_degree)
    X_train_poly = featurizer.fit_transform(X_train)
    # Bug fix: use transform() on the test set -- the featurizer must be
    # fitted on the training data only, never refitted on the test data.
    X_test_poly = featurizer.transform(X_test)
    regressor = LinearRegression()
    regressor.fit(X_train_poly, y_train)
    predictions = regressor.predict(X_test_poly)

    get_score(predictions, y_test)
    print('cross_validation: ')
    print(cross_validation.cross_val_score(regressor, X_train_poly, y_train, cv=10))

def do_brr(X_train, y_train, X_test, y_test):
    """Bayesian ridge regression: fit, score, report cross-validation."""
    model = BayesianRidge()
    model.fit(X_train, y_train)

    get_score(model.predict(X_test), y_test)
    print('cross_validation: ')
    print(cross_validation.cross_val_score(model, X_train, y_train, cv=10))
    
def do_nnr(X_train, y_train, X_test, y_test, units1=5, units2=5, rate=0.02, iter=30):
    """Neural-network regression (sknn): two Softmax layers + linear output.

    NOTE(review): the parameter name `iter` shadows the builtin; kept
    unchanged so existing keyword callers are not broken.
    """
    net = Regressor(
        layers=[
            Layer("Softmax", units=units1),
            Layer("Softmax", units=units2),
            Layer("Linear"),
        ],
        learning_rate=rate,
        n_iter=iter,
    )

    net.fit(np.array(X_train), np.array(y_train))
    get_score(net.predict(np.array(X_test)), y_test)

def do_dtr(X_train, y_train, X_test, y_test, p_max_depth=4):
    """Decision-tree regression: fit, score, report cross-validation."""
    model = DecisionTreeRegressor(max_depth=p_max_depth)
    model.fit(X_train, y_train)
    predictions = model.predict(X_test)

    get_score(predictions, y_test)
    print('cross_validation: ')
    print(cross_validation.cross_val_score(model, X_train, y_train, cv=10))
    cross = cross_validation.cross_val_score(model, X_train, y_train, cv=7)
    print("Accuracy: %0.2f (+/- %0.2f)" % (cross.mean(), cross.std() * 2))
    
def show_tree(regressor):
    """Export a fitted decision tree to Graphviz format in tree.dot."""
    with open("tree.dot", 'w') as f:
        # Fix: the old code rebound f to export_graphviz's return value,
        # shadowing the file handle inside the with-block for no reason.
        tree.export_graphviz(regressor, out_file=f)
    
def _run_timed(label, func, *args, **kwargs):
    """Run func(*args, **kwargs), printing the label and the elapsed time."""
    print('%s ...' % label)
    start = timeit.default_timer()
    func(*args, **kwargs)
    stop = timeit.default_timer()
    print('run time: %.10fs' % (stop - start))
    print('--' * 20 + '\n')

def main():
    """Load the hourly data set and benchmark several regression models.

    Each model is run through _run_timed, replacing four copy-pasted
    start/stop/print stanzas.  Commented-out experiment variants (typed
    hourly/daily files, parameter sweeps, neural-network runs) were
    removed; see version history if they are needed again.
    """
    # Row layout: 20140801    5    5    0    1    36    37
    hour_file = 'format/hour.txt'
    # Negative split keeps the last 18*7 rows as the test set.
    X, y, X_train, y_train, X_test, y_test = load_data(hour_file, 6, 6, -18 * 7)

    _run_timed('muliple linear regression',
               do_mlr, X_train, y_train, X_test, y_test)
    _run_timed('Bayesian Ridge Regression',
               do_brr, X_train, y_train, X_test, y_test)
    _run_timed('polynomial regression',
               do_pr, X_train, y_train, X_test, y_test, p_degree=5)
    _run_timed('decision tree regression',
               do_dtr, X_train, y_train, X_test, y_test, p_max_depth=8)
if __name__ == '__main__':
    # Time the whole run, models and data loading included.
    overall_start = timeit.default_timer()
    main()
    overall_stop = timeit.default_timer()
    print('run time: %.10fs' % (overall_stop - overall_start))
    
