"""
@This file classifies StackExchange (serverfault by default) question data using numeric features.
@Cross-validation: random 70-30 splits, 10 iterations, repeated over 10 random negative samples.

"""
import numpy as np
import pylab
import matplotlib
import sklearn
from sklearn import ensemble
from sklearn import datasets
from sklearn.utils import shuffle
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from sklearn.metrics import accuracy_score
import MySQLdb
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.pipeline import Pipeline
from sklearn.feature_extraction.text import HashingVectorizer
import scipy.sparse
from sklearn import cross_validation
from sklearn import metrics
#from preprocess import pp

"""
port=3306
user="root"
password="123"
database="isec2014"
table_migrated = "train_migrated_serverfault_aug13"
table_not_migrated = "train_not_migrated_serverfault_aug13"



"""
site_name = "serverfault"
#site_name = "superuser"
#site_name = "programmers.stackexchange"
#site_name = "dba.stackexchange"
#site_name = "stackoverflow"
port=3307
user="sangeetal"
password="sangeetal"
database="sangeeta"
table_migrated = "train_migrated_"+site_name+"_aug13"
table_not_migrated = "train_not_migrated_"+site_name+"_aug13"
#"""


result_table ="classification_results"

# One connection, two cursors: one for the feature SELECTs, one for the
# final result INSERTs.
db1= MySQLdb.connect(host="localhost", user=user, passwd=password,db=database, port=port)
select_cursor = db1.cursor()
insert_cursor = db1.cursor()
# Fetch every numeric feature column for the migrated (positive) questions.
# Column order matters: it is indexed positionally in the loops below.
str_m = "select question_id , f1_title_len, f1_body_len, f1_reputation, f1_account_age , \
        f1_url_count, f1_number_of_tags, f1_have_popular_tag, f1_all_capital_count,\
        f1_first_capital_count,f1_special_char_count,f1_have_code  from `"+ table_migrated+"`"

print "str_m = ", str_m
select_cursor.execute(str_m)

m_other_feature = list()  # feature vectors of migrated (positive) questions
m_data = list()           # unused in this script
target = list()           # class labels: 1 = migrated, 0 = not migrated
not_m_data = list()       # unused in this script

migrated_data = select_cursor.fetchall()
for temp_data in migrated_data:
    temp_f_val = list()
    question_id =temp_data[0]
    #title_len = temp_data[1]
    #body_len = temp_data[2]
    reputation = temp_data[3]
    account_age = temp_data[4]
    #url_count= temp_data[5]
    number_of_tags=temp_data[6]
    have_popular_tag=temp_data[7]
    #all_capital_count=temp_data[8]
    #first_capital_count=temp_data[9]
    #special_char_count=temp_data[10]
    #have_code = temp_data[11]
    
    #temp_f_val.append(title_len)
    #temp_f_val.append(body_len)
    temp_f_val.append(reputation)
    temp_f_val.append(account_age)
    #temp_f_val.append(url_count)
    temp_f_val.append(number_of_tags)
    temp_f_val.append(have_popular_tag)
    #temp_f_val.append(all_capital_count)
    #temp_f_val.append(first_capital_count)
    #temp_f_val.append(special_char_count)
    #temp_f_val.append(have_code)
    print "\n---------------------------------------------------------------------------"
    print "Qid =", question_id,"\n" 
    #print "title_len=", title_len, " \n" 
    #print "body_len =", body_len, " \n"
    print "reputation =", reputation, " \n"
    print "account_age =", account_age , " \n" 
    #print "url_count= ", url_count , " \n"
    print "number_of_tags=", number_of_tags, " \n"
    print "have_popular_tag=", have_popular_tag, " \n"
    #print "all_capital_count=", all_capital_count, " \n"
    #print "first_capital_count=", first_capital_count, " \n"
    #print "special_char_count=", special_char_count, " \n"
    #print "have_code =", have_code, " \n "
    
    print"\n----------------------------------------------------------------------------"
    m_other_feature.append(temp_f_val)
      
    target.append(1)

"""
not_migrated_count_str = "select count(*) from `"+ table_not_migrated+"`"
select_cursor.execute(not_migrated_count_str)
temp_data = select_cursor.fetchall()
not_migrated_count = 0
for t in temp_data:
    not_migrated_count = t[0]

print "Migrated count = ", len(m_other_feature)
print "Not migrated count =  ", not_migrated_count
"""    


str_not_m = "select question_id , f1_title_len, f1_body_len, f1_reputation, f1_account_age , \
        f1_url_count, f1_number_of_tags, f1_have_popular_tag, f1_all_capital_count,\
        f1_first_capital_count,f1_special_char_count,f1_have_code  from `"+ table_not_migrated+"`"

select_cursor.execute(str_not_m)
not_migrated_data = select_cursor.fetchall()
store_not_migrated_data = not_migrated_data

"""
print "Migrated count = ", len(m_other_feature)
print "Not migrated count =  ", len(not_migrated_data)
"""

# Running sums of the per-seed mean metrics for each classifier
# (dt = decision tree, knn = k-nearest neighbours, ada = AdaBoost,
# gnb = Gaussian naive Bayes).  Each is divided by 10 at the end to report
# the average over the 10 random negative samples.
total_dt_acc = 0.0
total_dt_precision =0.0
total_dt_recall =0.0
total_dt_f1 = 0.0
total_dt_roc = 0.0

total_knn_acc = 0.0
total_knn_precision =0.0
total_knn_recall =0.0
total_knn_f1 = 0.0
total_knn_roc = 0.0

total_ada_acc = 0.0
total_ada_precision =0.0
total_ada_recall =0.0
total_ada_f1 = 0.0
total_ada_roc = 0.0

total_gnb_acc = 0.0
total_gnb_precision =0.0
total_gnb_recall =0.0
total_gnb_f1 = 0.0
total_gnb_roc = 0.0

#migrated_only_target = target
# Append one negative (0) label per positive example: each round below
# samples exactly len(m_other_feature) == len(migrated_data) negative rows,
# so the label vector stays balanced and its length never changes.
# (Replaces a manual while-loop counter with the idiomatic extend.)
target.extend([0] * len(migrated_data))
rand_array  = [0,1,2,3,4,5,6,7,8,9]
for random_seed_val in rand_array:
     
    not_migrated_data = store_not_migrated_data
    #target = migrated_only_target
    np.random.seed(random_seed_val)
    indices = np.random.permutation(len(not_migrated_data))[:len(m_other_feature)]

    #print "len not migrated", len(not_migrated_data)
    not_m_other_feature = list()
   
    valid_index=-1
    count =0
   
    print "len not migrated", len(not_migrated_data), " indecs len", len(target)
    for temp_data in not_migrated_data:
        valid_index= valid_index+1
        #print "I am here"
        if valid_index in indices:
            #print "i can not reach"
            temp_f_val = list()
            #question_id =temp_data[0]
            #title_len = temp_data[1]
            #body_len = temp_data[2]
            reputation = temp_data[3]
            account_age = temp_data[4]
            #url_count= temp_data[5]
            number_of_tags=temp_data[6]
            have_popular_tag=temp_data[7]
            #all_capital_count=temp_data[8]
            #first_capital_count=temp_data[9]
            #special_char_count=temp_data[10]
            #have_code = temp_data[11]
    
            #temp_f_val.append(title_len)
            #temp_f_val.append(body_len)
            temp_f_val.append(reputation)
            temp_f_val.append(account_age)
            #temp_f_val.append(url_count)
            temp_f_val.append(number_of_tags)
            temp_f_val.append(have_popular_tag)
            #temp_f_val.append(all_capital_count)
            #temp_f_val.append(first_capital_count)
            #temp_f_val.append(special_char_count)
            #temp_f_val.append(have_code)
            print "\n---------------------------------------------------------------------------"
            print "Qid =", question_id,"\n" 
            #print "title_len=", title_len, " \n" 
            #print "body_len =", body_len, " \n"
            print "reputation =", reputation, " \n"
            print "account_age =", account_age , " \n" 
            #print "url_count= ", url_count , " \n"
            print "number_of_tags=", number_of_tags, " \n"
            print "have_popular_tag=", have_popular_tag, " \n"
            #print "all_capital_count=", all_capital_count, " \n"
            #print "first_capital_count=", first_capital_count, " \n"
            #print "special_char_count=", special_char_count, " \n"
            #print "have_code =", have_code, " \n " 
            print"\n----------------------------------------------------------------------------"
            not_m_other_feature.append(temp_f_val)
        
            #target.append(0)
            count =count+1
            if count >=len(not_migrated_data):
                break

    print "len migrated data=", len(m_other_feature)
    print "len of not mmigrated data=", len(not_m_other_feature)
    print "target =", len(target)


    total_data = m_other_feature+not_m_other_feature
    cv = cross_validation.ShuffleSplit(len(target), n_iter=10, test_size=0.30, 
                                   random_state=random_seed_val)

    knn = KNeighborsClassifier(algorithm='auto', leaf_size=59, metric='minkowski',
         n_neighbors=6, p=2, weights='uniform')
    dt  =DecisionTreeClassifier(max_depth=5)
    gnb = GaussianNB() # Guasian Niave Bayes

    rf =  RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1),
    ada =    AdaBoostClassifier(n_estimators=100)
    svc =     SVC(kernel="linear", C=0.025)

    dt_score = cross_validation.cross_val_score(dt,np.asarray(total_data), 
                                            np.asarray(target), cv=cv)
    dt_acc = cross_validation.cross_val_score(dt,np.asarray(total_data), 
         np.asarray(target), cv=cv, score_func=metrics.accuracy_score)
    dt_precision = cross_validation.cross_val_score(dt,np.asarray(total_data), 
              np.asarray(target), cv=cv,score_func=metrics.precision_score)
    dt_recall = cross_validation.cross_val_score(dt,np.asarray(total_data), 
            np.asarray(target), cv=cv,score_func=metrics.recall_score)
    dt_f1= cross_validation.cross_val_score(dt,np.asarray(total_data), 
            np.asarray(target), cv=cv,score_func=metrics.f1_score)
    dt_roc = cross_validation.cross_val_score(dt,np.asarray(total_data),
         np.asarray(target), cv=cv, score_func=metrics.roc_auc_score)
    print "dt =", dt_score.mean()
    print "dt accuracy =", dt_acc.mean()
    print "dt precision =", dt_precision.mean()
    print "dt reacll =", dt_recall.mean()
    print "dt f1=", dt_f1.mean()
    print "dt roc=", dt_roc.mean()
        
    total_dt_acc = total_dt_acc +dt_acc.mean()
    total_dt_precision = total_dt_precision +dt_precision.mean()
    total_dt_recall = total_dt_recall +dt_recall.mean()
    total_dt_f1 = total_dt_f1 + dt_f1.mean()
    total_dt_roc = total_dt_roc +dt_roc.mean()

    #ada_score = cross_validation.cross_val_score(ada,np.asarray(total_data), np.asarray(target), cv=cv)
    #print "ada = ", ada_score.mean()
    ada_score = cross_validation.cross_val_score(ada,np.asarray(total_data), 
                                            np.asarray(target), cv=cv)
    ada_acc = cross_validation.cross_val_score(ada,np.asarray(total_data), 
       np.asarray(target), cv=cv, score_func=metrics.accuracy_score)
    ada_precision = cross_validation.cross_val_score(ada,np.asarray(total_data), 
              np.asarray(target), cv=cv,score_func=metrics.precision_score)
    ada_recall = cross_validation.cross_val_score(ada,np.asarray(total_data), 
            np.asarray(target), cv=cv,score_func=metrics.recall_score)
    ada_f1= cross_validation.cross_val_score(ada,np.asarray(total_data), 
            np.asarray(target), cv=cv,score_func=metrics.f1_score)
    ada_roc = cross_validation.cross_val_score(ada,np.asarray(total_data),
         np.asarray(target), cv=cv, score_func=metrics.roc_auc_score)
    print "ada =", ada_score.mean()
    print "ada accuracy=", ada_acc.mean()
    print "ada precision=", ada_precision.mean()
    print "ada recall=", ada_recall.mean()
    print "ada f1=", ada_f1.mean()
    print "ada roc=", ada_roc.mean()
        
    total_ada_acc = total_ada_acc +ada_acc.mean()
    total_ada_precision = total_ada_precision +ada_precision.mean()
    total_ada_recall = total_ada_recall +ada_recall.mean()
    total_ada_f1 = total_ada_f1 + ada_f1.mean()
    total_ada_roc = total_ada_roc +ada_roc.mean()


    knn_score = cross_validation.cross_val_score(knn,np.asarray(total_data), np.asarray(target), cv=cv)
    print "knn = ", knn_score.mean()

    knn_score = cross_validation.cross_val_score(knn,np.asarray(total_data), 
                                            np.asarray(target), cv=cv)
    knn_acc = cross_validation.cross_val_score(knn,np.asarray(total_data), 
         np.asarray(target), cv=cv, score_func=metrics.accuracy_score)
    knn_precision = cross_validation.cross_val_score(knn,np.asarray(total_data), 
              np.asarray(target), cv=cv,score_func=metrics.precision_score)
    knn_recall = cross_validation.cross_val_score(knn,np.asarray(total_data), 
            np.asarray(target), cv=cv,score_func=metrics.recall_score)
    knn_f1= cross_validation.cross_val_score(knn,np.asarray(total_data), 
            np.asarray(target), cv=cv,score_func=metrics.f1_score)
    knn_roc = cross_validation.cross_val_score(knn,np.asarray(total_data),
         np.asarray(target), cv=cv, score_func=metrics.roc_auc_score)
    print "knn =", knn_score.mean()
    print "knn accuracy=", knn_acc.mean()
    print "knn precision=", knn_precision.mean()
    print "knn recall=", knn_recall.mean()
    print "knn f1=", knn_f1.mean()
    print "knn roc=", knn_roc.mean()

    total_knn_acc = total_knn_acc +knn_acc.mean()
    total_knn_precision = total_knn_precision +knn_precision.mean()
    total_knn_recall = total_knn_recall +knn_recall.mean()
    total_knn_f1 = total_knn_f1 + knn_f1.mean()
    total_knn_roc = total_knn_roc +knn_roc.mean()
   
    gnb_score = cross_validation.cross_val_score(gnb,np.asarray(total_data), 
                                            np.asarray(target), cv=cv)
    gnb_acc = cross_validation.cross_val_score(gnb,np.asarray(total_data), 
         np.asarray(target), cv=cv, score_func=metrics.accuracy_score)
    gnb_precision = cross_validation.cross_val_score(gnb,np.asarray(total_data), 
            np.asarray(target), cv=cv,score_func=metrics.precision_score)
    gnb_recall = cross_validation.cross_val_score(gnb,np.asarray(total_data), 
            np.asarray(target), cv=cv,score_func=metrics.recall_score)
    gnb_f1= cross_validation.cross_val_score(gnb,np.asarray(total_data), 
            np.asarray(target), cv=cv,score_func=metrics.f1_score)
    gnb_roc = cross_validation.cross_val_score(gnb,np.asarray(total_data),
         np.asarray(target), cv=cv, score_func=metrics.roc_auc_score)
    print "gnb =", gnb_score.mean()
    print "gnb accuracy=", gnb_acc.mean()
    print "gnb precision=", gnb_precision.mean()
    print "gnb recall=", gnb_recall.mean()
    print "gnb f1=", gnb_f1.mean()
    print "gnb roc=", gnb_roc.mean()

    total_gnb_acc = total_gnb_acc +gnb_acc.mean()
    total_gnb_precision = total_gnb_precision +gnb_precision.mean()
    total_gnb_recall = total_gnb_recall +gnb_recall.mean()
    total_gnb_f1 = total_gnb_f1 + gnb_f1.mean()
    total_gnb_roc = total_gnb_roc +gnb_roc.mean()
    #rf_score = cross_validation.cross_val_score(rf,np.asarray(total_data), np.asarray(target), cv=cv)
    #print "rf = ", rf_score.mean()

    #svc_score = cross_validation.cross_val_score(svc,np.asarray(total_data), np.asarray(target), cv=cv)
    #print " svc = ", svc_score.mean()

print "Final Average Accuracy of 10 Random Databases"
print"\n----------------------------------------------"

print "avg_dt_acc = ", (total_dt_acc*100)/10
print "Avg_dt_precision =", (total_dt_precision*100)/10
print  "Avg_dt_recall =", (total_dt_recall*100)/10
print  "Avg_dt_f1 =", (total_dt_f1*100)/10
print "Avg_dt_roc =", (total_dt_roc*100)/10

print "avg_ada_acc = ", (total_ada_acc*100)/10
print "Avg_ada_precision =", (total_ada_precision*100)/10
print  "Avg_ada_recall =", (total_ada_recall*100)/10
print  "Avg_ada_f1 =", (total_ada_f1*100)/10
print "Avg_ada_roc =", (total_gnb_roc*100)/10

print "avg_knn_acc = ", (total_knn_acc*100)/10
print "Avg_knn_precision =", (total_gnb_precision*100)/10
print  "Avg_knn_recall =", (total_knn_recall*100)/10
print  "Avg_knn_f1 =", (total_knn_f1*100)/10
print "Avg_knn_roc =", (total_knn_roc*100)/10

print "avg_gnb_acc = ", (total_gnb_acc*100)/10
print "Avg_gnb_precision =", (total_gnb_precision*100)/10
print  "Avg_gnb_recall =", (total_gnb_recall*100)/10
print  "Avg_gnb_f1 =", (total_gnb_f1*100)/10
print "Avg_gnb_roc =", (total_gnb_roc*100)/10

# Final averaged metrics (percent) that get persisted to the results table.
avg_dt_acc = (total_dt_acc*100)/10
avg_dt_precision =(total_dt_precision*100)/10
avg_dt_recall =(total_dt_recall*100)/10
avg_dt_f1 =(total_dt_f1*100)/10
avg_dt_roc = (total_dt_roc*100)/10

avg_ada_acc = (total_ada_acc*100)/10
avg_ada_precision = (total_ada_precision*100)/10
avg_ada_recall = (total_ada_recall*100)/10
avg_ada_f1 = (total_ada_f1*100)/10
# BUG FIX: the original used total_gnb_roc here (copy-paste error), so the
# stored AdaBoost ROC was actually the naive-Bayes ROC.
avg_ada_roc = (total_ada_roc*100)/10

avg_knn_acc = (total_knn_acc*100)/10
# BUG FIX: the original used total_gnb_precision here (copy-paste error),
# so the stored KNN precision was actually the naive-Bayes precision.
avg_knn_precision = (total_knn_precision*100)/10
avg_knn_recall = (total_knn_recall*100)/10
avg_knn_f1 = (total_knn_f1*100)/10
avg_knn_roc =(total_knn_roc*100)/10

avg_gnb_acc = (total_gnb_acc*100)/10
avg_gnb_precision = (total_gnb_precision*100)/10
avg_gnb_recall =(total_gnb_recall*100)/10
avg_gnb_f1 = (total_gnb_f1*100)/10
avg_gnb_roc = (total_gnb_roc*100)/10

insert_dt = "insert into "+result_table+ " values('"+ site_name+"','dt','AC',"+\
(str)(len(m_other_feature))+","+(str)(avg_dt_acc)+","+(str)(avg_dt_precision)+","+\
(str)(avg_dt_recall) +","+(str)(avg_dt_f1) +","+(str)(avg_dt_roc) +")"
print "inset dt=", insert_dt
insert_cursor.execute(insert_dt)

insert_ada = "insert into "+result_table+ " values('"+ site_name+"','ada','AC',"+\
(str)(len(m_other_feature))+","+(str)(avg_ada_acc)+","+(str)(avg_ada_precision)+","+\
(str)(avg_ada_recall) +","+(str)(avg_ada_f1) +","+(str)(avg_ada_roc) +")"
insert_cursor.execute(insert_ada)

insert_knn = "insert into "+result_table+ " values('"+ site_name+"','knn','AC',"+\
(str)(len(m_other_feature))+","+(str)(avg_knn_acc)+","+(str)(avg_knn_precision)+","+\
(str)(avg_knn_recall) +","+(str)(avg_knn_f1) +","+(str)(avg_knn_roc) +")"
insert_cursor.execute(insert_knn)

insert_gnb = "insert into "+result_table+ " values('"+ site_name+"','gnb','AC',"+\
(str)(len(m_other_feature))+","+(str)(avg_gnb_acc)+","+(str)(avg_gnb_precision)+","+\
(str)(avg_gnb_recall) +","+(str)(avg_gnb_f1) +","+(str)(avg_gnb_roc) +")"
insert_cursor.execute(insert_gnb)

db1.commit()
