# -*- coding: utf-8 -*-
"""
@date: 2021/6/2 18:28
@file: tools.py
@author: lilong
@desc:  处理函数 (text/sequence processing helper functions)
"""

import numpy as np

import keras.backend as K


def tokenize(s, cf):
    """Segment string *s* into a list of words.

    Uses the jieba segmenter instance stored on the config object as
    ``cf.JIEBA``, with HMM-based new-word discovery disabled so the
    segmentation stays deterministic against the dictionary.
    """
    segmenter = cf.JIEBA
    return segmenter.lcut(s, HMM=False)


def sent2vec(S, word2id, word2vec):
    """Map a batch of word-segmented sentences to padded word vectors.

    S format: ``[[w1, w2, ...], ...]`` — a list of sentences, each a list
    of words. Each word's id is appended once *per character* of the word,
    so the id sequence matches the sentence's character-level length
    (presumably to align word features with a character-level encoder —
    confirm with the caller). Unknown words map to id 0.

    Returns the rows of ``word2vec`` selected by the padded id matrix.
    """
    id_rows = []
    for sentence in S:
        row = []
        for word in sentence:
            # one copy of the word id per character in the word
            row.extend([word2id.get(word, 0)] * len(word))
        id_rows.append(row)
    padded = seq_padding(id_rows)
    return word2vec[padded]


def seq_padding(X, padding=0):
    """Right-pad every sequence in X to the length of the longest one.

    Parameters
    ----------
    X : list of sequences (lists or 1-D arrays of ids)
    padding : fill value for the tail of shorter sequences (default 0).

    Returns
    -------
    np.ndarray of shape (len(X), max_len); an empty array when X is empty.
    """
    if not X:
        # max() on an empty list raises ValueError; return an empty array
        # instead of crashing on an empty batch.
        return np.array([])
    lengths = [len(x) for x in X]
    max_len = max(lengths)
    return np.array([
        np.concatenate([x, [padding] * (max_len - len(x))]) if len(x) < max_len else x
        for x in X
    ])


def max_in_dict(d):
    """Return the key with the largest value in *d*, or None if *d* is empty.

    Ties break by insertion order (the first key holding the maximum value
    wins), matching the original stable-sort behavior.
    """
    if d:
        # max() is O(n) versus sorting's O(n log n), and states the intent
        # directly.
        return max(d, key=d.get)
    return None


def seq_and_vec(x):
    """Concatenate a per-sample vector onto every timestep of a sequence.

    *x* is a pair ``(seq, vec)``: the vector gains a timestep axis, is
    tiled to the sequence's timestep count, and is concatenated onto the
    sequence along the last axis. Intended for use inside a Keras Lambda
    layer (operates on ``keras.backend`` tensors).
    """
    seq, vec = x
    vec = K.expand_dims(vec, 1)
    tiled = K.tile(vec, [1, K.shape(seq)[1], 1])
    return K.concatenate([seq, tiled], 2)
