# -*- coding: utf-8 -*-
import os

import Data.ExtractData as Data
import numpy as np
import Data.UrlsDataFromBaidu as DataBaidu

# import classifier.sklearn_svm as SVM
import classifier.cjlin_liblinear as SVM
import preprocess
from preprocess import bag, CleanUp, suffle


def get_data_bag(X, Y):
    """Collect the distinct feature tokens in X and distinct labels in Y.

    Args:
        X: iterable of per-sample feature collections.
        Y: iterable of label values.

    Returns:
        (feature_bag, label_bag): lists of the unique features and unique
        labels (order unspecified, as produced by set iteration).
    """
    unique_features = set()
    for sample_features in X:
        unique_features |= set(sample_features)
    return list(unique_features), list(set(Y))

def validation_two_level_in_three_level_model(two_level_X_one_hot_list, two_level_Y_label, three_level_label_bag, classifier=None):
    """Score a three-level classifier against two-level ground-truth labels.

    Each prediction is a 1-based index into ``three_level_label_bag``; the
    middle component of that dotted label ("a.b.c" -> "b") is extracted, and
    the prediction counts as correct when that component occurs as a
    substring of the sample's two-level label.

    Args:
        two_level_X_one_hot_list: one-hot feature rows to classify.
        two_level_Y_label: ground-truth two-level label strings, parallel to
            ``two_level_X_one_hot_list``.
        three_level_label_bag: dotted three-level label names the classifier's
            outputs index into.
        classifier: object exposing ``predict``; defaults to the module-level
            ``model`` (preserves the original global-dependent behavior).

    Returns:
        float accuracy in [0, 1]; 0.0 when there are no predictions
        (previously this raised ZeroDivisionError).
    """
    if classifier is None:
        classifier = model
    predict_values = classifier.predict(two_level_X_one_hot_list)
    if len(predict_values) == 0:
        # Guard the division below on empty input.
        return 0.0
    correct_count = 0
    for i, v in enumerate(predict_values):
        # Predictions are 1-based indices into the three-level label bag.
        word = three_level_label_bag[int(v) - 1].split(".")[1]
        if two_level_Y_label[i].find(word) != -1:
            correct_count += 1
    return float(correct_count) / len(predict_values)

model = SVM.SVM()
feature_bag, label_bag = [], []
X = []
Y = []

# Load the curated URL datasets, drop the finance labels, then merge in the
# Baidu-crawled URLs converted to two-level labels.
data = Data.ExtractData(['Alexa', 'Search', 'World68', 'Hao123'], title=False).variant_label()
_X, _Y = CleanUp.remove_item_by_label(data.X, data.Y, ["Interest.Finance"])
data = DataBaidu.UrlDataFromBaidu().convert_to_two_level()
# BUG FIX: previously `_X.extend(_X)` / `_Y.extend(_Y)` duplicated the first
# dataset and silently discarded the Baidu data loaded on the line above.
_X.extend(data.X)
_Y.extend(data.Y)

_X, _Y = suffle.suffle(_X, _Y)

# Train on the first 60% of the shuffled data.
cut_pos = int(len(_X) * 0.6)
X = _X[:cut_pos]
Y = _Y[:cut_pos]

# Build features and the feature/label vocabularies from the training split;
# the same bags are reused later to vectorize the evaluation split.
X_features = preprocess.ExtractFeature(X)
feature_bag, label_bag = get_data_bag(X_features, Y)
print ("feature bag:%d label bag:%d"%(len(feature_bag),len(label_bag)))

model_file = "tmp/max_3_gram_as_feature_s1_two_level_suffle_0.80.model"
# Idiomatic truth test (was `os.path.exists(model_file) == False`).
if not os.path.exists(model_file):
    # No cached model: vectorize, train from scratch, and cache to disk.
    X_one_hot_list = bag.FeatureList2OneHot(X_features)
    Y_index = np.array(bag.Label2Index(Y))
    print("shape:%d,%d" % (X_one_hot_list.shape[0], X_one_hot_list.shape[1]))
    X_train, Y_train = (X_one_hot_list, Y_index)

    model.train1(X_train, Y_train)
    model.save(model_file)
else:
    model.load(model_file)

# Evaluate on the last 20% of the shuffled data (held out from the 60%
# training split above).
cut_pos = int(len(_X) * 0.8)
X, Y = _X[cut_pos:], _Y[cut_pos:]

# Vectorize the evaluation split against the training-time vocabularies so
# the one-hot columns and label indices line up with what the model saw.
X_features = preprocess.ExtractFeature(X)
X_one_hot_list = bag.FeatureList2OneHot(X_features, feature_bag)
Y_index = np.array(bag.Label2Index(Y, label_bag))
print("shape:%d,%d" % (X_one_hot_list.shape[0], X_one_hot_list.shape[1]))

print(model.validation(X_one_hot_list, Y_index))










