#! /usr/bin/env python
# -*- coding: utf-8 -*-

import matplotlib.pyplot as plt
import utils.UrlsDataFromBaidu as DataBaidu

# import classifier.sklearn_svm as SVM
import preprocess.url_parser as url_parser


def draw_plot(x, ys, ticks=None, labels=None, title="", keep=0, points=None):
    """Draw one or more series over *x* on a single axes and show the figure.

    Each series in *ys* is plotted as a blue line labelled 'join'. An
    optional ``points`` array of shape (n, 2) is scattered on top, and
    ``ticks``/``labels`` customise the x axis. ``title`` and ``keep`` are
    currently unused but kept for interface stability.
    """
    plt.figure(1, figsize=(19, 10))
    axes = plt.subplot()
    axes.cla()  # clear any previous drawing on figure 1

    for series in ys:
        axes.plot(x, series, 'b', label='join')

    if points is not None:
        axes.scatter(points[:, 0], points[:, 1])

    if ticks is not None:
        axes.set_xticks(ticks)
    if labels is not None:
        axes.set_xticklabels(labels)

    axes.grid()
    plt.show()


def count_words_to_bag_map(_word_count_bag=None, word=""):
    """Increment the occurrence count of *word* in ``_word_count_bag``.

    Mutates the dict in place; returns None.

    Bug fixes vs. the original:
      * the mutable default argument ``{}`` was shared between calls —
        replaced with a ``None`` sentinel;
      * ``dict.has_key()`` no longer exists in Python 3 — replaced with
        ``dict.get()``.
    """
    if _word_count_bag is None:
        _word_count_bag = {}
    _word_count_bag[word] = _word_count_bag.get(word, 0) + 1


#X_features = preprocess.ExtractFeature(X)

if __name__ == "__main__":
    # Load (url, category) training data scraped from Baidu.
    data = DataBaidu.UrlDataFromBaidu()
    X = data.X
    Y = data.Y

    from sklearn.feature_selection import SelectKBest
    from sklearn.feature_selection import chi2

    import numpy as np
    from preprocess import bag

    # One-hot encode the url feature lists and index the category labels.
    X_one_hot_list = bag.FeatureList2OneHot(X)
    Y_index = bag.Label2Index(Y)

    X_one_hot_list = np.array(X_one_hot_list)
    Y_index = np.array(Y_index)

    # Bug fix: fit() returns the fitted selector object, not the reduced
    # feature matrix; fit_transform() yields the top-100 chi2 features.
    X_new = SelectKBest(chi2, k=100).fit_transform(X_one_hot_list, Y_index)

    print('--------------------')

    # Count word occurrences across all urls, skipping boilerplate tokens
    # that carry no category signal.
    word_count_bag = {}
    for url in X:
        u = url_parser.UrlParser(url)
        for w in u.names:
            if w in ["baidu", "question", "zhidao"]:
                continue
            count_words_to_bag_map(word_count_bag, w)

    # (word, count) pairs sorted by descending frequency.
    D = sorted(word_count_bag.items(), key=lambda kv: kv[1], reverse=True)

    print("words length = %d" % (len(D)))

    # Keep a window of the most frequent words for display.
    show_start = 0
    show_len = 400
    D = D[show_start:(show_start + show_len)]

    # Bug fix: .decode("utf-8") fails on Python 3 str objects; only decode
    # raw byte strings (on Python 2, str IS bytes, so behavior is unchanged).
    tick_labels = [k.decode("utf-8") if isinstance(k, bytes) else k
                   for (k, _count) in D]

    # Show the frequency distribution of the selected word window.
    draw_plot([i for i in range(show_len)], [[count for (_word, count) in D]],
              labels=tick_labels, ticks=range(show_len))

    pass







