"""
Classify ServerFault questions as migrated vs. not migrated,
using TF-IDF features with k-NN and decision-tree classifiers.
"""
import numpy as np
import pylab
import matplotlib
import sklearn
from sklearn import ensemble
from sklearn import datasets
from sklearn.utils import shuffle
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from sklearn.metrics import accuracy_score
import MySQLdb
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn import tree

## ==================== MySQL connection configuration ====================
# (original note marked this section for deletion)

# NOTE(review): the '#"""' / '"""' pair below is a comment toggle.  The
# first config (localhost:3306, database isec2014) is ACTIVE; the second
# (port 3307, database sangeeta) is swallowed by the triple-quoted string,
# which evaluates to an unused module-level string expression.
# NOTE(review): credentials are hard-coded -- consider environment variables.
## ========================================================================
#"""
port=3306
user="root"
password="123"
database="isec2014"
table_migrated = "train_migrated_serverfault_aug13"
table_not_migrated = "train_not_migrated_serverfault_aug13"

"""
site_name = "serverfault"
port=3307
user="sangeetal"
password="sangeetal"
database="sangeeta"
table_migrated = "train_migrated_"+site_name+"_aug13"
table_not_migrated = "train_not_migrated_"+site_name+"_aug13"
#"""


# Open the MySQL connection and the cursor used by every query below.
db1= MySQLdb.connect(host="localhost", user=user, passwd=password,db=database, port=port)
select_cursor = db1.cursor()


def clean(val):
    """Placeholder text cleaner: currently passes the value through unchanged.

    Kept as a hook so real normalisation (HTML stripping, lowercasing, ...)
    can be added in one place later.
    """
    return val

# --- Load the migrated questions (positive class, label 1) ---------------
str_m = "select f1_title, f1_body from " + table_migrated
select_cursor.execute(str_m)

migrated_data = select_cursor.fetchall()

# Each row is (title, body); concatenate them into one text blob per
# question and run it through clean().
m_data = [clean(row[0] + " " + row[1]) for row in migrated_data]
target = [1 for _ in m_data]
not_m_data = list()
 
# --- Load the non-migrated questions (negative class, label 0) -----------
str_not_m = "select f1_title, f1_body from "+ table_not_migrated
select_cursor.execute(str_not_m)

not_migrated_data = select_cursor.fetchall()

# Balance the classes: keep only as many non-migrated rows as there are
# migrated ones.  The original computed np.random.permutation(...) here but
# never used it (the sampling check was commented out), so those dead RNG
# calls were removed; the first len(m_data) rows are taken, as before.
# Bug fix: the old count-and-break loop still appended one negative row
# even when m_data was empty; the slice appends none in that case.
for temp_data in not_migrated_data[:len(m_data)]:
    q_body_title = temp_data[0] + " " + temp_data[1]
    not_m_data.append(clean(q_body_title))
    target.append(0)


# --- Shuffle the pooled data and split it 50/50 into train and test ------
total_data = m_data + not_m_data

total_data_arr = np.asarray(total_data)
target_arr = np.asarray(target)

# Fixed seed so the shuffle (and therefore the split) is reproducible.
np.random.seed(1)
new_indices = np.random.permutation(len(total_data))

half_tuple = len(total_data) // 2  # integer division, same as py2 '/'
train_idx = new_indices[:half_tuple]
test_idx = new_indices[half_tuple:]

x_data = total_data_arr[train_idx]
y_train = target_arr[train_idx]

x_test_data = total_data_arr[test_idx]
y_test = target_arr[test_idx]

# Report the sizes of the raw train/test splits (Python 2 print syntax).
print "len=  ", len(x_data), "type = ", type(x_data), "target len = ", len(y_train)
print "len =  ", len(x_test_data), "type = ", type(x_test_data), "target len ", len(y_test)

# Learn the TF-IDF vocabulary on the training half only; the test half is
# transformed with that same vocabulary (avoids train/test leakage).
vectorizer = TfidfVectorizer(min_df=1)
x_train=vectorizer.fit_transform(x_data)
x_test = vectorizer.transform(x_test_data)

# Both are sparse (n_samples, n_features) matrices.
print "len=  ", x_test.shape
print "len = ", x_train.shape

# --- k-NN classifier -----------------------------------------------------
# Bug fix: the original fitted a default KNeighborsClassifier() and then
# built a configured instance on a bare expression line whose result was
# discarded -- so the n_neighbors=3 / leaf_size=1 settings never applied.
# The configuration is now given to the model that is actually fitted.
knn = KNeighborsClassifier(algorithm='auto', leaf_size=1, metric='minkowski',
                           n_neighbors=3, p=2, weights='uniform')
knn.fit(x_train, y_train)
predict_knn = knn.predict(x_test)
print(classification_report(y_test, predict_knn))
# Single-argument print parenthesized: identical output under Python 2.
print(accuracy_score(y_test, predict_knn))


# --- Decision-tree classifier on the same TF-IDF features ----------------
# The sparse matrices are densified with .toarray() before fitting and
# predicting, presumably because this sklearn version's tree required
# dense input -- TODO confirm.
dt = tree.DecisionTreeClassifier()
x_train_dense = x_train.toarray()
x_test_dense = x_test.toarray()
dt.fit(x_train_dense, y_train)

predict_dt = dt.predict(x_test_dense)
print(classification_report(y_test, predict_dt))
# Single-argument print parenthesized: identical output under Python 2.
print(accuracy_score(y_test, predict_dt))


# Dead code: the triple-quoted string below holds leftover experimentation
# snippets (an iris-dataset example).  It evaluates to an unused
# module-level string and has no effect at runtime.
"""
iris_X_train = iris_X[indices[:-10]]
#print "y train=", iris.target
iris_y_train = iris_y[indices[:-10]]
iris_X_test  = iris_X[indices[-10:]]
iris_y_test  = iris_y[indices[-10:]]

    
    
m_data_array =  np.asarray(m_data)   
"""

