# coding=utf8

from multiprocessing import Process, Lock
import pickle
import time
import logging
import logging.config
logging.config.fileConfig('src/logging.conf')
logger = logging.getLogger('debug')

import src.train_word2vec.train
import src.cluster.create_graph
import src.cluster.cluster
import src.display.display_cluster
import src.mine_rule.mine_rule
import src.evaluate_rule.evaluate_rule
import src.ner.ner

from sys import argv
# import argparse
# parser = argparse.ArgumentParser(description='type inference system')
# parser.add_argument('-o', '--operation', type=int, dest='operation',
#     help='1 - train word2vec, 2 - cluster, 3 - mine rule, 4 - evaluater rule',required=True)
# parser.add_argument('-v', '--verbose', action='store_true', dest='verbose',
#     help='Enable debug info', required=False)


def train_word2vec():
    """Run word2vec training via the project's training module."""
    trainer = src.train_word2vec.train
    trainer.train()


def confirm(selected_operation):
    if selected_operation == 1:
        print 'train_word2vec? [Y/n]',
    elif selected_operation == 2:
        print 'cluster? [Y/n]',
    elif selected_operation == 3:
        print 'mine rule? [Y/n]',
    elif selected_operation == 4:
        print 'evaluate rule? [Y/n]'
    else:
        return selected_operation
    x = 'Y' #raw_input()
    return selected_operation if x=='Y' else 0


def main(type_):
    """Dispatch one pipeline stage chosen on the command line.

    Reads sys.argv directly and expects exactly four items:
    [script_name, operation, precision_min, positive_case_num].
    Operations: 1 = train word2vec, 2 = cluster + display,
    3 = cluster + mine rules, 4 = cluster + mine + evaluate rules
    (lock-passing variant), 5 = NER training, 6 = NER evaluation.

    ``type_`` is the category name; note that operations 2 and 3
    overwrite it with a hard-coded category, so the argument only
    matters for operations 4 and the fallback message.
    """
    # NOTE(review): unpacking raises ValueError unless exactly 4 CLI
    # arguments are supplied; positive_case_num is never used below.
    a,op,pmin,positive_case_num = argv

    # args = parser.parse_args()
    # if args.verbose:
    #     logger.setLevel(logging.DEBUG)

    # selected_operation = args.operation
    selected_operation = int(op)
    # confirm() returns 0 when the user declines, turning the run
    # into a no-op via the first branch.
    selected_operation = confirm(selected_operation)
    if selected_operation == 0:
        pass
    elif selected_operation == 1:
        src.train_word2vec.train.train()
    elif selected_operation == 2:
        # Hard-coded category overrides the type_ argument here.
        type_ = 'Individual oak trees'
        G, node_to_sentence_num, node_word, word_to_vector = \
            src.cluster.create_graph.create(type_)
        graphs = src.cluster.cluster.graph_cluster( \
            G, node_to_sentence_num, node_word, word_to_vector)
        src.display.display_cluster.display_cluster( \
            graphs, node_word, word_to_vector, node_to_sentence_num)
        print 'cluster data saved in data/'
    elif selected_operation == 3:
        # Same pipeline as operation 2, then rule mining on top.
        type_ = 'Individual oak trees'
        G, node_to_sentence_num, node_word, word_to_vector = src.cluster.create_graph.create(type_)
        # 'if True' toggles between recomputing the cluster graph and
        # loading a previously pickled one -- flip by hand when needed.
        if True:
            graphs = src.cluster.cluster.graph_cluster( \
                G, node_to_sentence_num, node_word, word_to_vector)
            cluster_graph = src.display.display_cluster.display_cluster( \
                graphs, node_word, word_to_vector, node_to_sentence_num)
            pickle.dump(cluster_graph, open('data/cluster_graph.txt', 'wb'))
        else:
            cluster_graph = pickle.load(open('data/cluster_graph.txt', 'rb'))
        print 'generate cluster graph complete'
        # Fixed threshold here; operation 4 takes it from the CLI (pmin).
        precision_min = 0.8
        paths = src.mine_rule.mine_rule.mine(type_, cluster_graph, node_to_sentence_num, precision_min)
        logger.debug(len(paths))
        for path in paths:
            logger.debug(path)
    elif selected_operation == 4:
        # Lock suggests this path was meant to run under multiprocessing
        # (see the commented-out loop in __main__).
        lock = Lock()
        # NOTE(review): create() is called with (lock, type_) here but
        # with (type_) alone in operations 2/3; likewise graph_cluster
        # and mine() gain extra arguments -- confirm the project modules
        # actually accept both signatures.
        G, node_to_sentence_num, node_word, word_to_vector = src.cluster.create_graph.create(lock, type_)
        # Same recompute-vs-load toggle as operation 3, but pickled
        # per-category under data/cluster_graph/.
        if True:
            graphs = src.cluster.cluster.graph_cluster(
                G, node_to_sentence_num, node_word, word_to_vector, type_)

            cluster_graph = src.display.display_cluster.display_cluster(
                graphs, node_word, word_to_vector, node_to_sentence_num)

            pickle.dump(cluster_graph, open('data/cluster_graph/' + type_.replace(' ', '_'), 'wb'))
        else:
            cluster_graph = pickle.load(open('data/cluster_graph/' + type_.replace(' ', '_'), 'rb'))
        print type_+'\t:\t'+'generate cluster graph complete'
        precision_min = float(pmin)
        src.mine_rule.mine_rule.mine(lock, type_, cluster_graph, node_to_sentence_num, precision_min)
        src.evaluate_rule.evaluate_rule.evaluate(lock, type_)
    elif selected_operation == 5:
        ner_train_file = 'data/ner_train.txt'
        ner_model_file = 'data/ner_model.crfsuite'
        src.ner.ner.ner_train(ner_train_file, ner_model_file)
    elif selected_operation == 6:
        ner_evaluate_file = 'data/ner_evaluate.txt'
        ner_model_file = 'data/ner_model.crfsuite'
        src.ner.ner.ner_evaluate(ner_evaluate_file, ner_model_file)
    else:
        print type_+'\t:\t'+'invalid operation'


if __name__ == '__main__':
    # A per-category loop over data/category_30.txt (one main() call per
    # category, possibly as separate processes) was disabled; the script
    # currently performs a single run with an empty category string.
    main('')
