
# @Time    : 19/6/25 10:40
# @Author  : 504747754@qq.com(ZengYang)
# @File    : simhash_worker.py
# @Software: PyCharm
# @ToUse  : simhash的海明计算，单独的算法模型
"""
maxid——17642803
去噪
分词(获取中文分词)
计算签名(tokens，)
海明计算（最快的批量计算方式）如果发现有小于3，即不再计算
simhash处理的时候，正则匹配中英文，拆分所有四字短语
准确度优化方案：
1.拆分短句
2.统计短句出现的次数，进行签名计算
计算方式：
1.从数据库一条条拿数据计算simhash，id，放入redis，修改数据表的持久化状态，否则不做调整
2.计算海明距离后，如果相似，记录相似id，并且修改状态
3.不相似，不做任何操作
"""
import re
import sys

sys.path.append('../../')
import jieba.analyse
import numpy as np

# NOTE(review): str2 appears unused anywhere in this file — candidate for removal.
str2 = """
"""
# Redis key names used by the de-duplication pipeline:
# set of computed simhash signatures, the high-water-mark article id,
# and the id -> simhash mapping, respectively.
REDIS_SIM_KEY = 'simkey'
CHECK_MAX_ID = 'max_id'
ID_SIMHASH_MAP = 'id_simhash'


class Simhash_worker():
    """
    Placeholder for a distributed article de-duplication checker.

    Not yet implemented; intended to coordinate simhash comparison
    across workers (see the module docstring for the planned flow).
    """

    pass


def web_content_filter(content):
    """
    Keep only CJK characters (U+4E00..U+9FCC), dropping everything else.

    :param content: raw text possibly containing markup, Latin text, digits, etc.
    :return: the concatenation of all CJK characters in *content*, in order.
    """
    # Deleting every non-CJK run is equivalent to joining all CJK runs.
    return re.sub(r'[^\u4e00-\u9fcc]+', '', content)


def distance_haiming(hash1, hash2):
    """
    Hamming distance between two simhash signatures given as '0'/'1' bit strings.

    :param hash1: first signature (string of binary digits).
    :param hash2: second signature (string of binary digits).
    :return: number of bit positions where the signatures differ.
    """
    # XOR the two values; the popcount of the result is the distance.
    diff = int('0b' + str(hash1), 2) ^ int('0b' + str(hash2), 2)
    return bin(diff).count('1')


class SimhashManager:
    """
    Compute a 64-bit SimHash signature (as a '0'/'1' string) for Chinese text
    and measure Hamming distance between signatures.
    """

    def __init__(self, content):
        # NOTE: this assignment shadows the `simhash` method on the instance —
        # after __init__, `self.simhash` is the signature string, not a callable.
        # Kept as-is because __str__ and distance() read the attribute.
        self.simhash = self.simhash(content)

    def __str__(self):
        return str(self.simhash)

    def simhash(self, content):
        """
        Compute the SimHash signature of *content*.

        Pipeline: keep only CJK chars -> jieba segmentation -> TF-IDF top-50
        keywords with weights -> weighted bit voting over each keyword's
        64-bit hash -> sign of each column becomes one signature bit.

        :param content: raw article text.
        :return: 64-char '0'/'1' signature, or the sentinel '00' when no
                 usable features were extracted (e.g. undecodable content).
        """
        content = web_content_filter(content)
        seg = jieba.cut(content)
        # NOTE(review): original author modified jieba's tfidf.py so that tags
        # sort by (weight, word) instead of weight only, for deterministic order:
        #   tags = sorted(freq.items(), key=itemgetter(1, 0), reverse=True)
        key_words = jieba.analyse.extract_tags(
            '|'.join(seg), topK=50, withWeight=True, allowPOS=())
        vote_rows = []
        for feature, weight in key_words:
            bits = self.string_hash(feature)
            # Each '1' bit votes +weight, each '0' bit votes -weight.
            vote_rows.append([weight if b == '1' else -weight for b in bits])
        # BUG FIX: the empty check previously ran AFTER np.sum; short-circuit
        # before touching numpy so the sentinel path never sums an empty array.
        if not vote_rows:
            return '00'
        column_sums = np.sum(np.array(vote_rows), axis=0)
        # Positive column -> '1', non-positive -> '0'.
        return ''.join('1' if v > 0 else '0' for v in column_sums)

    def string_hash(self, source):
        """
        Hash *source* into a 64-character '0'/'1' bit string.

        Uses a multiply-xor rolling hash (modeled on CPython's classic string
        hash) masked to 128 bits, then keeps the low 64 bits.

        :param source: keyword string to hash.
        :return: 64-char bit string ('0' * 64 for the empty string).
        """
        if source == "":
            # BUG FIX: originally returned int 0 here, which would raise
            # TypeError when simhash() iterated over the result; return a
            # well-formed all-zero bit string instead.
            return '0' * 64
        x = ord(source[0]) << 7
        m = 1000003
        mask = 2 ** 128 - 1
        for c in source:
            x = ((x * m) ^ ord(c)) & mask
        x ^= len(source)
        # (Removed the original `if x == -1: x = -2` guard: x is masked
        # non-negative above, so it could never be -1.)
        return bin(x)[2:].zfill(64)[-64:]

    def distance(self, com):
        """
        Hamming distance between this signature and another manager's.

        :param com: another object exposing a `simhash` bit-string attribute.
        :return: number of differing bit positions.
        """
        n = int(self.simhash, 2) ^ int(com.simhash, 2)
        count = 0
        # Kernighan's trick: each iteration clears the lowest set bit.
        while n:
            n &= (n - 1)
            count += 1
        return count

    def simhash_by_sentence(self, content):
        """Planned sentence-level signature (see module docstring) — not implemented yet."""
        simhash = ''
        return simhash



if __name__ == '__main__':
    # Smoke test: two near-identical sentences should yield a small distance.
    sig_a = str(SimhashManager('申请一个QQ邮箱'))
    sig_b = str(SimhashManager('注册一个QQ邮箱'))
    print(distance_haiming(sig_a, sig_b))