'''
Created on 2019-08-23
@author: sida

Builds the Node computation graph for BERT-based similarity between
company-information text and labels (word-, sentence- and token-level variants).
'''
from compute_wordsMatrixOfBertSimilarity import *
from util import Node
import util

import node_DistributionSimilarity as node_ds
import compute_DistributionSimilarity as c_ds
# --- Word-level pipeline: company info -> BERT matrix -> F / OSD -> inference ---
# NOTE: the original bound this Node to the name `info2MatrixByBert`, shadowing
# the compute function of the same name pulled in by the star import above.
# The node now carries the `node_` prefix used by every other node in this file;
# the old name is kept as an alias so existing importers keep working.
node_info2MatrixByBert = Node(name='info2MatrixByBert',
                              sunNodes=[node_ds.node_csvFile2Obj],
                              compute=info2MatrixByBert,
                              persistence_path='cache/info2MatrixByBert')
info2MatrixByBert = node_info2MatrixByBert  # backward-compatible alias (same shadowing as before)
node_computeF = Node(name='computeF',
                     sunNodes=[node_info2MatrixByBert],
                     compute=computeF,
                     persistence_path='cache/computeF')
node_computeOSD = Node(name='computeOSD',
                       sunNodes=[node_info2MatrixByBert],
                       compute=computeOSD,
                       persistence_path='cache/computeOSD')
node_inference_work = Node(name='inference_work',  # was positional; keyword for consistency
                           sunNodes=[node_computeF, node_ds.node_csvFile2Obj],
                           compute=inference_work,
                           persistence_path='cache/inference_work')
node_inference_OSD = Node(name='inference_OSD',
                          sunNodes=[node_computeOSD, node_ds.node_csvFile2Obj],
                          compute=c_ds.inference,
                          save=False)  # final result, not persisted

# --- Sentence-level pipeline: sentences -> BERT matrices -> F / OSD -> inference ---
import node_DistributionSimilarity as node_ds  # re-import; node_ds is already bound above (no-op)

# Index each record's sentences, then embed them with BERT.
node_index2Sentences = Node(name='index2Sentences',
                            compute=index2Sentences,
                            sunNodes=[node_ds.node_csvFile2Obj],
                            persistence_path='cache/index2Sentences')
node_sentences2MatrixByBert = Node(name='sentences2MatrixByBert',
                                   compute=sentences2MatrixByBert,
                                   sunNodes=[node_index2Sentences],
                                   persistence_path='cache/sentences2MatrixByBert')
# computeF / computeOSD over the sentence matrices, cached on disk.
node_computeF_sentences = Node(name='computeF_sentences',
                               compute=computeF,
                               sunNodes=[node_sentences2MatrixByBert],
                               persistence_path='cache/computeF_sentences')
node_computeOSD_sentences = Node(name='computeOSD_sentences',
                                 compute=computeOSD,
                                 sunNodes=[node_sentences2MatrixByBert],
                                 persistence_path='cache/computeOSD_sentences')
# Final sentence-level inference; result is not persisted.
node_inference_sentences = Node('inference_sentences',
                                compute=c_ds.inference,
                                sunNodes=[node_computeF_sentences, node_ds.node_csvFile2Obj],
                                save=False)

# Tokenized company-info sentences -> get a BERT sentence vector for each label
# (labels treated as short phrase-sentences), mapping company info to a matrix
# -> compute cosine values; larger means more similar -> pick top-N as in inference_work.
# --- Token-level pipeline: label tokens -> BERT matrix -> cosine similarity ---
import node_DistributionSimilarity as node_ds  # re-import; node_ds is already bound above (no-op)

# Embed the token set (from the count matrix) with BERT; no disk cache.
node_sentences2MatrixByBert_tokens = Node(name='tokens_matrix',
                                          compute=sentences2MatrixByBert,
                                          sunNodes=[node_ds.node_CountMatrix])
# computeF with cosine as the similarity function (passed via superParams).
node_computeF_sentences2MatrixByBert_tokens = Node(
    name='sentences2MatrixByBert_tokens',
    compute=computeF,
    sunNodes=[node_sentences2MatrixByBert_tokens],
    superParams=([], {'similarityF': util.vectorCos}),
)
# Inference over the cosine scores; result is not persisted.
node_inference_cos = Node(name='inference_cos',
                          compute=inference_work,
                          sunNodes=[node_computeF_sentences2MatrixByBert_tokens,
                                    node_ds.node_csvFile2Obj],
                          save=False)
# Always recomputed (forceUpdate=True) — presumably cheap relative to its inputs.
node_wmb_similarity = Node(name='node_wmb_similarity',
                           compute=util.singel_model_similarity,
                           sunNodes=[node_computeF_sentences2MatrixByBert_tokens],
                           forceUpdate=True)

# --- Transfer-matrix pipeline: info texts -> transfer matrix -> mean similarity ---
node_index2Info = Node(name='index2Info',
                       compute=index2Info,
                       sunNodes=[node_ds.node_csvFile2Obj],
                       persistence_path='cache/index2Info')
node_texts2transferMatrix = Node(name='texts2transferMatrix',
                                 compute=util.texts2transferMatrix,
                                 sunNodes=[node_index2Info],
                                 persistence_path='cache/texts2transferMatrix')
node_transferMatrixMeanSimilarity = Node(name='transferMatrixMeanSimilarity',
                                         compute=computetransferMatrixMeanSimilarity,
                                         sunNodes=[node_texts2transferMatrix],
                                         persistence_path='cache/transferMatrixMeanSimilarity')
if __name__ == '__main__':
    # Nodes whose caches must be recomputed on this run.
    compulsory_Update_Nodes = [node_wmb_similarity]
    # Nodes that may be served from their persisted cache.
    NoForce_Update_Nodes = []
    wanted_Nodes = NoForce_Update_Nodes + compulsory_Update_Nodes
    for node in compulsory_Update_Nodes:
        node.forceUpdate = True
    # Plain loop instead of a list comprehension: the comprehension was used
    # purely for its print side effect and built a throwaway list.
    for node in wanted_Nodes:
        print(type(node.getData()))
    
    
    
    
