#!/usr/bin/env python
# coding=utf-8
# __author__ = 'Yunchao Ling'

from pymongo import MongoClient
import sys
import re
from simhash import Simhash, SimhashIndex
import nltk

# Python 2 compatibility hack: force the interpreter-wide default encoding to
# UTF-8 so implicit str<->unicode conversions of non-ASCII (Chinese) text do
# not raise UnicodeDecodeError. reload(sys) restores the setdefaultencoding
# attribute that site.py deletes at startup. (Python 2 only.)
default_encoding = 'utf-8'
if sys.getdefaultencoding() != default_encoding:
    reload(sys)
    sys.setdefaultencoding(default_encoding)


def get_features(s):
    """Return overlapping character 3-grams of *s* for simhash fingerprinting.

    The text is lowercased and stripped of every non-word character first.
    A string shorter than the window still yields one (possibly truncated)
    feature, so the result is never empty.
    """
    window = 3
    normalized = re.sub(r'[^\w]+', '', s.lower())
    n_grams = max(len(normalized) - window + 1, 1)
    return [normalized[start:start + window] for start in range(n_grams)]


def remove_stopwords(words, stopwords):
    """Return the items of *words* that are not in *stopwords*, order kept.

    words: iterable of tokens.
    stopwords: any container supporting membership tests (list or set).
    """
    # Comprehension with the idiomatic 'not in' replaces the manual
    # append loop with the awkward 'not word in' test.
    return [word for word in words if word not in stopwords]


def tokenize_stopwords_stem(line, stopwords):
    """Tokenize *line* with NLTK's TweetTokenizer, stem every token with the
    English Snowball stemmer, then drop any stemmed token found in
    *stopwords*. Returns the surviving stems as a list.
    """
    stemmer = nltk.stem.SnowballStemmer('english')
    tokenizer = nltk.tokenize.TweetTokenizer()
    stemmed = [stemmer.stem(token) for token in tokenizer.tokenize(line)]
    return remove_stopwords(stemmed, stopwords)


def merge_token(tokenSet):
    """Concatenate all tokens in *tokenSet* into one string.

    Uses str.join instead of repeated ``+=``, which is quadratic in the
    number of tokens on interpreters without the in-place concat
    optimization.
    """
    return "".join(tokenSet)


def get_features2(line, stopwords):
    """Feature extractor that stems *line* and removes stopwords before
    taking character 3-grams (see get_features)."""
    merged = merge_token(tokenize_stopwords_stem(line, stopwords))
    return get_features(merged)


def merge_sets(ht, datasetHead, datasetList):
    """Fold the member lists of every key in *datasetList* into the list at
    ht[datasetHead], removing duplicates, and delete the merged keys.

    ht: dict mapping a group id to a list of member ids.
    Mutates *ht* in place and also returns it (same object).
    """
    # A real set replaces the dict-with-dummy-values used before; the loop
    # variable no longer shadows the builtin 'set'.
    merged = set(ht[datasetHead])
    for group_key in datasetList:
        merged.update(ht[group_key])
        ht.pop(group_key)
    ht[datasetHead] = list(merged)
    return ht


def check_sets(ht, datasetHead):
    """Transitively merge every group reachable from *datasetHead*.

    Repeatedly scans the member list of ht[datasetHead]; any member that is
    itself still a group key gets absorbed (via merge_sets), until a full
    pass finds nothing left to merge. Mutates *ht* in place.
    """
    merged_something = True
    while merged_something:
        merged_something = False
        merge_list = []
        for item in ht[datasetHead]:
            # dict.has_key() is deprecated in Python 2 and removed in
            # Python 3; the 'in' operator is equivalent on both.
            if item in ht and item != datasetHead:
                merged_something = True
                merge_list.append(item)
        merge_sets(ht, datasetHead, merge_list)


# Open a MongoDB connection.
def InitMongoDB(host, port, database, collection):
    """Connect to MongoDB at host:port and return a (client, collection)
    pair for the requested database/collection."""
    client = MongoClient(host, port)
    return client, client[database][collection]


# Close a MongoDB connection.
def CloseMongoDB(MongoDB_Connection):
    """Release the given MongoDB client connection."""
    MongoDB_Connection.close()


def Classify_Group(wordlist, k_value, country, province, city):
    """Cluster near-duplicate organization names with simhash.

    wordlist: iterable of raw organization-name strings.
    k_value:  simhash tolerance (max differing bits) for near-duplicates.
    country/province/city: used only to name the two debug output files
    written under D:/data/org_bulk_curation/.

    Returns a list of [org_name, group_head_id] pairs; consecutive entries
    sharing the same group_head_id belong to the same duplicate group.
    """
    stopwords = nltk.corpus.stopwords.words('english')

    # Number each (right-stripped) input line, starting from 1.
    data = {}
    for count, line in enumerate(wordlist, 1):
        data[count] = line.rstrip()

    # Fingerprint every line exactly once and reuse the hash for both the
    # index build and the near-duplicate lookup (the original code hashed
    # each line twice, doubling the tokenize/stem/simhash work).
    hashes = dict((k, Simhash(get_features2(v, stopwords))) for k, v in data.items())
    index = SimhashIndex([(str(k), h) for k, h in hashes.items()], k=k_value)

    # ht maps line id -> list of ids of its near-duplicates (itself included).
    ht = {}
    for key in data.keys():
        ht[key] = [int(item) for item in index.get_near_dups(hashes[key])]

    # Transitively merge overlapping groups. check_sets/merge_sets delete
    # absorbed keys from ht, so iterate over a snapshot of the keys and
    # re-check membership. ('has_key' is deprecated; 'in' works on 2 and 3.)
    for key in list(ht.keys()):
        if key in ht:
            check_sets(ht, key)

    base = "D:/data/org_bulk_curation/" + country + "_" + province + "_" + city + "_k" + str(k_value)
    outfile1 = open(base + ".stat", "w")
    outfile2 = open(base + ".tsv", "w")
    result = []
    try:
        for group_head, members in ht.items():
            outfile1.write(str(group_head) + "\t" + str(len(members)) + "\t" + str(members) + "\n")
            for member in members:
                outfile2.write(str(member) + "\t" + data[member] + "\t" + str(group_head) + "\n")
                result.append([data[member], group_head])
    finally:
        # try/finally guarantees the debug files are closed (and flushed)
        # even if a write fails; the per-line flush() calls were redundant.
        outfile1.close()
        outfile2.close()
    return result


if __name__ == "__main__":
    # Cluster Chinese organization names (province, city) by (province, city),
    # renumber the per-run group ids into globally unique ids, and write the
    # result both to a TSV file and to a second MongoDB collection.
    country = "China"
    k_value = 5

    MongoDB_Connection, MongoDB_Collection = InitMongoDB(host="10.188.188.22", port=27017, database="organization",
                                                         collection="organization")
    MongoDB_Connection_2, MongoDB_Collection_2 = InitMongoDB(host="10.188.188.22", port=27017, database="org_bulk_curation",
                                                             collection="org_bulk_curation_china_k_" + str(k_value) + "_20160812")
    outfile = open("D:/data/org_bulk_curation_china_k_" + str(k_value) + "_20160812.tsv", "w")

    # Global counters: group ids returned by Classify_Group are only unique
    # within one (province, city) run, so they are renumbered here.
    current_group_id = 0
    current_org_id = 0

    # List of provinces
    province_result = MongoDB_Collection.distinct("province", {"country": "China"})
    if len(province_result) > 0:
        for province_item in province_result:
            # Cities within this province
            city_result = MongoDB_Collection.distinct("city", {"country": "China", "province": province_item})
            if len(city_result) > 0:
                for city_item in city_result:
                    # Organizations with both province and city
                    print province_item + "##" + city_item
                    org_list = MongoDB_Collection.distinct("uni_org",
                                                           {"country": country, "province": province_item,
                                                            "city": city_item})
                    if len(org_list) > 0:
                        org_group = Classify_Group(org_list, k_value, country, province_item, city_item)
                        temp_group_id = 0
                        for item in org_group:
                            current_org_id += 1
                            # A change of the per-run group head id (item[1])
                            # marks the start of a new duplicate group.
                            if item[1] != temp_group_id:
                                temp_group_id = item[1]
                                current_group_id += 1
                            outfile.write(
                                str(current_org_id) + "\t" + str(
                                    current_group_id) + "\t" + country + "\t" + province_item
                                + "\t" + city_item + "\t" + item[0] + "\n")
                            outfile.flush()
                            MongoDB_Collection_2.insert_one(
                                {"org_id": current_org_id, "group_id": current_group_id, "country": country,
                                 "province": province_item, "city": city_item, "org_name": item[0]})
            # Organizations with a province but no city field
            no_city_list = MongoDB_Collection.distinct("uni_org",
                                                       {"country": country, "province": province_item,
                                                        "city": {"$exists": 0}})
            print province_item + "##Unknown"
            if len(no_city_list) > 0:
                org_group = Classify_Group(no_city_list, k_value, country, province_item, "Unknown")
                temp_group_id = 0
                for item in org_group:
                    current_org_id += 1
                    if item[1] != temp_group_id:
                        temp_group_id = item[1]
                        current_group_id += 1
                    outfile.write(
                        str(current_org_id) + "\t" + str(current_group_id) + "\t" + country + "\t" + province_item
                        + "\tUnknown\t" + item[0] + "\n")
                    outfile.flush()
                    # No "city" field is stored for province-only records.
                    MongoDB_Collection_2.insert_one(
                        {"org_id": current_org_id, "group_id": current_group_id, "country": country,
                         "province": province_item, "org_name": item[0]})
    # Cities with no province (ambiguous / duplicate city names)
    no_province_city_result = MongoDB_Collection.distinct("city", {"country": "China", "province": {"$exists": 0}})
    if len(no_province_city_result) > 0:
        for city_item in no_province_city_result:
            # Organizations with a city but no province
            print "Unknown##" + city_item
            org_list = MongoDB_Collection.distinct("uni_org",
                                                   {"country": country, "province": {"$exists": 0}, "city": city_item})
            if len(org_list) > 0:
                org_group = Classify_Group(org_list, k_value, country, "Unknown", city_item)
                temp_group_id = 0
                for item in org_group:
                    current_org_id += 1
                    if item[1] != temp_group_id:
                        temp_group_id = item[1]
                        current_group_id += 1
                    outfile.write(
                        str(current_org_id) + "\t" + str(
                            current_group_id) + "\t" + country + "\tUnknown\t" + city_item + "\t" + item[0] + "\n")
                    outfile.flush()
                    MongoDB_Collection_2.insert_one(
                        {"org_id": current_org_id, "group_id": current_group_id, "country": country, "city": city_item,
                         "org_name": item[0]})
    # Organizations with neither province nor city
    no_province_no_city_list = MongoDB_Collection.distinct("uni_org",
                                                           {"country": country, "province": {"$exists": 0},
                                                            "city": {"$exists": 0}})
    print "Unknown##Unknown"
    if len(no_province_no_city_list) > 0:
        org_group = Classify_Group(no_province_no_city_list, k_value, country, "Unknown", "Unknown")
        temp_group_id = 0
        for item in org_group:
            current_org_id += 1
            if item[1] != temp_group_id:
                temp_group_id = item[1]
                current_group_id += 1
            outfile.write(
                str(current_org_id) + "\t" + str(current_group_id) + "\t" + country + "\tUnknown\tUnknown\t" +
                item[0] + "\n")
            outfile.flush()
            MongoDB_Collection_2.insert_one(
                {"org_id": current_org_id, "group_id": current_group_id, "country": country, "org_name": item[0]})

    CloseMongoDB(MongoDB_Connection)
    CloseMongoDB(MongoDB_Connection_2)
    outfile.close()
