# -*- coding: utf-8 -*-

import os
# Run from the repository root so the relative paths below
# ("./tmp/...", project data modules) resolve correctly.
os.chdir("../")
# Parenthesized single-argument print is valid in both Python 2 and 3;
# the bare `print x` statement form is Python 2 only.
print(os.getcwd())


import Data.NewsLabeledData as DataNews
#import classifier.sklearn_svm as SVM
import classifier.cjlin_liblinear as SVM
import numpy as np
from preprocess import bag, stopwords
import preprocess.gram as Gram
import preprocess.fenci as fenci

if __name__ == "__main__":

    # Load the labeled news dataset (d.X: raw texts, d.Y: labels).
    d = DataNews.Data()

    # Build a bag-of-features representation per document:
    # word segmentation -> stopword filtering -> 2-grams + long unigrams.
    X_features = []
    st = stopwords.StopWords()
    for x in d.X:
        w = fenci.sentence_fenci(x)
        w = list(w)
        w = st.filterList(w)
        # 2-gram features
        g = Gram.list_n_gram(w, 2)
        # 1-gram features: keep only tokens longer than one character
        # (single-character tokens carry little signal here).
        g.extend(ww for ww in w if len(ww) > 1)
        # Remove duplicate features. NOTE(review): set() discards ordering,
        # so per-document feature order is nondeterministic across runs;
        # confirm downstream consumers do not rely on ordering.
        g = list(set(g))
        X_features.append(g)

    # Dump the features in liblinear's command-line input format.
    SVM.SVM.convert_to_data_as_liblinear_input_of_terminal(
        X_features, d.Y, "./tmp/news_svm_input_data.txt")

    # One-hot encode features and map string labels to integer indices.
    one_hot_x = bag.FeatureList2OneHot(X_features)
    Y_index = np.array(bag.Label2Index(d.Y))

    model = SVM.SVM()
    # 5-fold cross validation; parenthesized print works on Python 2 and 3.
    print(model.cross_validation(one_hot_x, Y_index, 5))

