diff --git "a/compressed_graph.dot" "b/compressed_graph.dot" new file mode 100644--- /dev/null +++ "b/compressed_graph.dot" @@ -0,0 +1,3610 @@ +strict digraph { +"0 /nncf_model_input_0"; +"1 /nncf_model_input_1"; +"2 /nncf_model_input_2"; +"3 BertForQuestionAnswering/BertModel[bert]/__getitem___0"; +"4 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[word_embeddings]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"5 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[word_embeddings]/embedding_0"; +"6 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[word_embeddings]/SymmetricQuantizer/symmetric_quantize_0"; +"7 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[token_type_embeddings]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"8 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[token_type_embeddings]/embedding_0"; +"9 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[token_type_embeddings]/SymmetricQuantizer/symmetric_quantize_0"; +"10 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/__add___0"; +"11 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/SymmetricQuantizer/symmetric_quantize_0"; +"12 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[position_embeddings]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"13 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[position_embeddings]/embedding_0"; +"14 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[position_embeddings]/SymmetricQuantizer/symmetric_quantize_0"; +"15 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/__iadd___0"; +"16 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/SymmetricQuantizer/symmetric_quantize_1"; +"17 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/LayerNorm[LayerNorm]/layer_norm_0"; +"18 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"19 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/Dropout[dropout]/dropout_0"; +"20 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"21 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"22 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"23 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"24 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"25 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"26 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"27 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"28 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"29 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"30 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"31 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"32 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"33 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"34 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"35 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"36 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"37 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"38 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"39 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"40 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"41 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"42 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"43 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"44 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"45 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"46 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"47 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"48 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"49 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"50 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"51 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"52 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"53 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"54 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"55 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"56 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"57 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"58 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"59 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"60 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"61 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"62 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"63 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"64 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"65 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"66 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"67 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"68 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"69 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"70 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"71 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"72 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"73 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"74 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"75 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"76 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"77 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"78 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/gelu_0"; +"79 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"80 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"81 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"82 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"83 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"84 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"85 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"86 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"87 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"88 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/__add___0"; +"89 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"90 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"91 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"92 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"93 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"94 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"95 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"96 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"97 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"98 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"99 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"100 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"101 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"102 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"103 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"104 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"105 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"106 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"107 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"108 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"109 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"110 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"111 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"112 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"113 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"114 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"115 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"116 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"117 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"118 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"119 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"120 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"121 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"122 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"123 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"124 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"125 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"126 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"127 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"128 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"129 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"130 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"131 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"132 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"133 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"134 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"135 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"136 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"137 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"138 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"139 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"140 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"141 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"142 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"143 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"144 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"145 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"146 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"147 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"148 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"149 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"150 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/gelu_0"; +"151 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"152 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"153 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"154 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"155 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"156 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"157 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"158 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"159 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"160 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/__add___0"; +"161 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"162 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"163 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"164 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"165 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"166 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"167 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"168 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"169 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"170 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"171 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"172 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"173 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"174 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"175 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; 
+"176 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"177 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"178 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"179 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"180 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"181 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"182 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"183 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"184 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"185 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"186 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"187 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"188 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"189 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"190 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"191 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"192 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"193 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"194 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"195 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"196 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"197 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"198 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"199 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"200 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"201 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"202 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"203 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"204 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"205 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"206 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"207 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"208 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"209 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"210 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"211 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"212 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"213 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"214 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"215 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"216 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"217 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"218 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"219 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"220 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"221 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"222 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/gelu_0"; +"223 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"224 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"225 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"226 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"227 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"228 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"229 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"230 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"231 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"232 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/__add___0"; +"233 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"234 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"235 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"236 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"237 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"238 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"239 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"240 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"241 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"242 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"243 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"244 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"245 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"246 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"247 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"248 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"249 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"250 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"251 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"252 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"253 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"254 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"255 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"256 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"257 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"258 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"259 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"260 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"261 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"262 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"263 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"264 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"265 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"266 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"267 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"268 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"269 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"270 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"271 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"272 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"273 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"274 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"275 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"276 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"277 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"278 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"279 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"280 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"281 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"282 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"283 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"284 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"285 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"286 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"287 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"288 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"289 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"290 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"291 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"292 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"293 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"294 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/gelu_0"; +"295 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"296 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"297 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"298 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"299 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"300 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"301 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"302 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"303 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"304 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/__add___0"; +"305 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"306 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"307 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"308 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"309 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"310 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"311 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"312 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"313 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"314 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"315 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"316 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"317 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"318 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"319 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"320 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"321 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"322 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"323 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"324 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"325 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"326 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"327 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"328 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"329 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"330 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"331 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"332 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"333 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"334 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"335 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"336 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"337 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"338 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"339 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"340 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"341 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"342 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"343 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"344 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"345 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"346 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"347 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"348 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"349 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"350 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"351 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"352 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"353 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"354 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"355 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"356 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"357 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"358 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"359 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"360 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"361 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"362 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"363 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"364 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"365 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"366 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/gelu_0"; +"367 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"368 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"369 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"370 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"371 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"372 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"373 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"374 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"375 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"376 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/__add___0"; +"377 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"378 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"379 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"380 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"381 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"382 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"383 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"384 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"385 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"386 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"387 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"388 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"389 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"390 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"391 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"392 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"393 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"394 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"395 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"396 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"397 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"398 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"399 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"400 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"401 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"402 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"403 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"404 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"405 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"406 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"407 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"408 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"409 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"410 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"411 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"412 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"413 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"414 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"415 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"416 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"417 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"418 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"419 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"420 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"421 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"422 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"423 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"424 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"425 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"426 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"427 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"428 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"429 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"430 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"431 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"432 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"433 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"434 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"435 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"436 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"437 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"438 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/gelu_0"; +"439 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"440 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"441 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"442 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"443 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"444 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"445 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"446 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"447 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"448 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/__add___0"; +"449 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"450 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"451 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"452 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"453 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"454 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"455 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"456 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"457 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"458 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"459 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"460 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"461 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"462 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"463 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"464 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"465 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"466 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"467 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"468 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"469 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"470 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"471 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"472 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"473 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"474 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"475 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"476 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"477 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"478 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"479 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"480 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"481 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"482 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"483 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"484 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"485 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"486 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"487 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"488 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"489 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"490 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"491 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"492 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"493 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"494 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"495 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"496 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"497 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"498 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"499 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"500 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"501 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"502 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"503 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"504 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"505 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"506 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"507 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"508 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"509 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"510 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/gelu_0"; +"511 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"512 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"513 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"514 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"515 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"516 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"517 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"518 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"519 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"520 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/__add___0"; +"521 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"522 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"523 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"524 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"525 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"526 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"527 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"528 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"529 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"530 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"531 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"532 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"533 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"534 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"535 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"536 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"537 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"538 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"539 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"540 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"541 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"542 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"543 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"544 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"545 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"546 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"547 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"548 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"549 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"550 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"551 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"552 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"553 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"554 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"555 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"556 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"557 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"558 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"559 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"560 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"561 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"562 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"563 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"564 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"565 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"566 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"567 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"568 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"569 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"570 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"571 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"572 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"573 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"574 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"575 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"576 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"577 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"578 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"579 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"580 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"581 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"582 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/gelu_0"; +"583 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"584 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"585 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"586 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"587 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"588 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"589 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"590 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"591 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"592 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/__add___0"; +"593 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"594 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"595 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"596 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"597 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"598 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"599 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"600 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"601 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"602 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"603 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"604 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"605 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"606 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"607 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"608 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"609 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"610 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"611 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"612 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"613 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"614 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"615 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"616 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"617 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"618 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"619 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"620 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"621 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"622 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"623 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"624 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"625 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"626 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"627 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"628 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"629 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"630 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"631 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"632 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"633 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"634 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"635 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"636 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"637 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"638 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"639 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"640 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"641 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"642 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"643 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"644 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"645 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"646 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"647 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"648 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"649 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"650 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"651 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"652 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"653 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"654 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/gelu_0"; +"655 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"656 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"657 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"658 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"659 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"660 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"661 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"662 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"663 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"664 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/__add___0"; +"665 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"666 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"667 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"668 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"669 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"670 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"671 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"672 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"673 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"674 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"675 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"676 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"677 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"678 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"679 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"680 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"681 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"682 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"683 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"684 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"685 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"686 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"687 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"688 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"689 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"690 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"691 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"692 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"693 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"694 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"695 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"696 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"697 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"698 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"699 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"700 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"701 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"702 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"703 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"704 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"705 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"706 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"707 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"708 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"709 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"710 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"711 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"712 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"713 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"714 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"715 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"716 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"717 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"718 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"719 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"720 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"721 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"722 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"723 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"724 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"725 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"726 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/gelu_0"; +"727 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"728 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"729 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"730 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"731 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"732 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"733 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"734 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"735 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"736 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/__add___0"; +"737 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"738 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"739 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"740 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"741 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"742 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"743 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"744 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"745 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"746 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"747 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"748 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"749 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"750 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"751 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"752 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"753 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"754 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"755 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"756 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"757 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"758 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"759 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"760 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"761 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"762 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"763 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"764 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"765 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"766 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"767 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"768 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"769 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"770 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"771 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"772 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"773 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"774 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"775 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"776 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"777 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"778 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"779 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"780 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"781 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"782 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"783 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"784 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"785 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"786 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"787 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"788 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"789 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"790 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"791 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"792 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"793 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"794 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"795 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"796 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"797 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"798 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/gelu_0"; +"799 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"800 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"801 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"802 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"803 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"804 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"805 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"806 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"807 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"808 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/__add___0"; +"809 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"810 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"811 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"812 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"813 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"814 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"815 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"816 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"817 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"818 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"819 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"820 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"821 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"822 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"823 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"824 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"825 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"826 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"827 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"828 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"829 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"830 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"831 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"832 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"833 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"834 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"835 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"836 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"837 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"838 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"839 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"840 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"841 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"842 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"843 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"844 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"845 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"846 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"847 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"848 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"849 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"850 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"851 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"852 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"853 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"854 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"855 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"856 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"857 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"858 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"859 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"860 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"861 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"862 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"863 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"864 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"865 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"866 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"867 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"868 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"869 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"870 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/gelu_0"; +"871 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"872 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"873 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"874 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"875 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"876 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"877 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"878 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"879 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"880 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/__add___0"; +"881 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"882 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"883 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"884 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"885 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"886 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"887 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"888 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"889 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"890 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"891 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"892 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"893 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"894 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"895 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"896 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"897 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"898 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"899 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"900 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"901 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"902 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"903 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"904 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"905 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"906 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"907 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"908 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"909 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"910 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"911 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"912 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"913 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"914 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"915 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"916 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"917 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"918 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"919 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"920 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"921 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"922 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"923 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"924 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"925 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"926 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"927 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"928 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"929 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"930 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"931 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"932 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"933 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"934 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"935 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"936 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"937 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"938 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"939 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"940 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"941 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"942 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/gelu_0"; +"943 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"944 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"945 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"946 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"947 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"948 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"949 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"950 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"951 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"952 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/__add___0"; +"953 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"954 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"955 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"956 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"957 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"958 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"959 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"960 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"961 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"962 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"963 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"964 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"965 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"966 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"967 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"968 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"969 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"970 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"971 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"972 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"973 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"974 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"975 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"976 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"977 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"978 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"979 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"980 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"981 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"982 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"983 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"984 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"985 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"986 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"987 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"988 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"989 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"990 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"991 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"992 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"993 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"994 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"995 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"996 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"997 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"998 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"999 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1000 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1001 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1002 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1003 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1004 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1005 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1006 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1007 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1008 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1009 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1010 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1011 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; 
+"1012 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1013 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1014 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/gelu_0"; +"1015 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1016 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1017 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1018 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1019 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1020 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1021 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1022 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1023 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1024 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/__add___0"; +"1025 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1026 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1027 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1028 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1029 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1030 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1031 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1032 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1033 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"1034 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"1035 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1036 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1037 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1038 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1039 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1040 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"1041 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"1042 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"1043 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"1044 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1045 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1046 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1047 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1048 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1049 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"1050 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"1051 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"1052 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"1053 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"1054 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"1055 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"1056 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"1057 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"1058 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"1059 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"1060 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"1061 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"1062 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"1063 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"1064 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"1065 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"1066 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"1067 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"1068 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1069 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1070 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1071 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1072 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1073 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1074 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1075 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1076 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1077 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1078 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1079 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1080 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1081 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1082 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1083 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1084 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1085 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1086 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/gelu_0"; +"1087 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1088 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1089 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1090 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1091 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1092 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1093 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1094 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1095 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1096 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/__add___0"; +"1097 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1098 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1099 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1100 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1101 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1102 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1103 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1104 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1105 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"1106 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"1107 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1108 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1109 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1110 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1111 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1112 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"1113 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"1114 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"1115 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"1116 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1117 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1118 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1119 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1120 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1121 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"1122 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"1123 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"1124 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"1125 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"1126 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"1127 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"1128 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"1129 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"1130 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"1131 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"1132 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"1133 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"1134 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"1135 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"1136 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"1137 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"1138 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"1139 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"1140 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1141 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1142 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1143 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1144 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1145 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1146 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1147 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1148 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1149 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1150 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1151 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1152 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1153 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1154 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1155 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1156 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1157 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1158 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/gelu_0"; +"1159 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1160 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1161 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1162 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1163 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1164 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1165 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1166 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1167 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1168 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/__add___0"; +"1169 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1170 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1171 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1172 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1173 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1174 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1175 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1176 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1177 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"1178 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"1179 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1180 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1181 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1182 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1183 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1184 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"1185 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"1186 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"1187 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"1188 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1189 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1190 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1191 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1192 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1193 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"1194 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"1195 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"1196 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"1197 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"1198 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"1199 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"1200 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"1201 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"1202 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"1203 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"1204 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"1205 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"1206 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"1207 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"1208 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"1209 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"1210 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"1211 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"1212 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1213 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1214 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1215 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1216 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1217 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1218 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1219 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1220 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1221 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1222 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1223 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1224 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1225 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1226 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1227 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1228 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1229 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1230 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/gelu_0"; +"1231 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1232 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1233 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1234 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1235 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1236 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1237 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1238 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1239 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1240 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/__add___0"; +"1241 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1242 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1243 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1244 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1245 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1246 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1247 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1248 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1249 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"1250 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"1251 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1252 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1253 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1254 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1255 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1256 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"1257 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"1258 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"1259 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"1260 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1261 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1262 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1263 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1264 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1265 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"1266 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"1267 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"1268 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"1269 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"1270 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"1271 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"1272 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"1273 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"1274 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"1275 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"1276 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"1277 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"1278 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"1279 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"1280 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"1281 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"1282 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"1283 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"1284 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1285 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1286 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1287 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1288 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1289 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1290 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1291 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1292 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1293 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1294 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1295 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1296 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1297 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1298 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1299 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1300 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1301 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1302 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/gelu_0"; +"1303 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1304 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1305 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1306 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1307 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1308 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1309 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1310 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1311 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1312 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/__add___0"; +"1313 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1314 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1315 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1316 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1317 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1318 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1319 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1320 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1321 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"1322 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"1323 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1324 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1325 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1326 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1327 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1328 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"1329 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"1330 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"1331 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"1332 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1333 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1334 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1335 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1336 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1337 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"1338 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"1339 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"1340 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"1341 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"1342 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"1343 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"1344 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"1345 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"1346 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"1347 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"1348 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"1349 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"1350 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"1351 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"1352 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"1353 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"1354 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"1355 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"1356 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1357 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1358 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1359 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1360 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1361 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1362 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1363 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1364 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1365 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1366 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1367 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1368 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1369 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1370 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1371 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1372 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1373 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1374 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/gelu_0"; +"1375 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1376 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1377 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1378 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1379 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1380 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1381 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1382 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1383 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1384 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/__add___0"; +"1385 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1386 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1387 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1388 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1389 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1390 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1391 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1392 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1393 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"1394 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"1395 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1396 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1397 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1398 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1399 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1400 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"1401 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"1402 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"1403 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"1404 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1405 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1406 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1407 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1408 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1409 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"1410 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"1411 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"1412 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"1413 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"1414 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"1415 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"1416 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"1417 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"1418 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"1419 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"1420 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"1421 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"1422 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"1423 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"1424 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"1425 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"1426 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"1427 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"1428 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1429 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1430 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1431 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1432 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1433 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1434 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1435 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1436 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1437 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1438 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1439 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1440 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1441 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1442 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1443 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1444 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1445 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1446 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/gelu_0"; +"1447 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1448 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1449 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1450 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1451 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1452 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1453 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1454 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1455 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1456 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/__add___0"; +"1457 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1458 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1459 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1460 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1461 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1462 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1463 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1464 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1465 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"1466 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"1467 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1468 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1469 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1470 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1471 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1472 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"1473 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"1474 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"1475 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"1476 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1477 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1478 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1479 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1480 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1481 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"1482 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"1483 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_1"; 
+"1484 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"1485 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"1486 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"1487 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"1488 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"1489 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"1490 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"1491 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"1492 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"1493 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"1494 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"1495 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"1496 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"1497 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"1498 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"1499 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"1500 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1501 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1502 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1503 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1504 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1505 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1506 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1507 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1508 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1509 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1510 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1511 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1512 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1513 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1514 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1515 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1516 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1517 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1518 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/gelu_0"; +"1519 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1520 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1521 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1522 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1523 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1524 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1525 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1526 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1527 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1528 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/__add___0"; +"1529 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1530 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1531 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1532 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1533 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1534 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1535 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1536 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1537 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"1538 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"1539 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1540 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1541 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1542 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1543 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1544 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"1545 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"1546 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"1547 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"1548 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1549 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1550 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1551 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1552 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1553 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"1554 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"1555 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"1556 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"1557 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"1558 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"1559 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"1560 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"1561 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"1562 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"1563 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"1564 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"1565 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"1566 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"1567 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"1568 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"1569 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"1570 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"1571 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"1572 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1573 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1574 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1575 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1576 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1577 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1578 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1579 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1580 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1581 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1582 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1583 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1584 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1585 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1586 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1587 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1588 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1589 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1590 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/gelu_0"; +"1591 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1592 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1593 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1594 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1595 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1596 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1597 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1598 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1599 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1600 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/__add___0"; +"1601 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1602 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1603 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1604 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1605 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1606 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1607 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1608 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1609 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"1610 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"1611 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1612 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1613 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1614 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1615 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1616 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"1617 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"1618 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"1619 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"1620 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1621 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1622 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1623 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1624 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1625 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"1626 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"1627 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"1628 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"1629 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"1630 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"1631 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"1632 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"1633 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"1634 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"1635 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"1636 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"1637 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"1638 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"1639 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"1640 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"1641 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"1642 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"1643 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"1644 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1645 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1646 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1647 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1648 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1649 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1650 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1651 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1652 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1653 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1654 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1655 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1656 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1657 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1658 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1659 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1660 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1661 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1662 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/gelu_0"; +"1663 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1664 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1665 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1666 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1667 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1668 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1669 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1670 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1671 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1672 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/__add___0"; +"1673 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1674 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1675 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1676 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1677 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1678 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1679 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1680 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1681 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0"; +"1682 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0"; +"1683 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1684 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1685 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1686 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1687 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1688 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0"; +"1689 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0"; +"1690 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_0"; +"1691 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_0"; +"1692 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1693 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1694 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1695 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1696 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1697 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0"; +"1698 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0"; +"1699 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_1"; +"1700 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_1"; +"1701 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_2"; +"1702 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_2"; +"1703 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/transpose_0"; +"1704 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/matmul_0"; +"1705 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0"; +"1706 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0"; +"1707 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/__add___0"; +"1708 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0"; +"1709 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0"; +"1710 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0"; +"1711 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/matmul_1"; +"1712 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1"; +"1713 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_3"; +"1714 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0"; +"1715 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_3"; +"1716 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1717 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1718 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1719 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1720 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1721 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0"; +"1722 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1723 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0"; +"1724 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/__add___0"; +"1725 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1726 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1727 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1728 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1729 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1730 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1731 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1732 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1733 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0"; +"1734 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/gelu_0"; +"1735 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0"; +"1736 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0"; +"1737 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1"; +"1738 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0"; +"1739 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1"; +"1740 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1741 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/linear_0"; +"1742 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0"; +"1743 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/Dropout[dropout]/dropout_0"; +"1744 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/__add___0"; +"1745 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0"; +"1746 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0"; +"1747 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0"; +"1748 BertForQuestionAnswering/NNCFLinear[qa_outputs]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0"; +"1749 BertForQuestionAnswering/NNCFLinear[qa_outputs]/linear_0"; +"1750 BertForQuestionAnswering/split_0"; +"1751 BertForQuestionAnswering/squeeze_0"; +"1752 BertForQuestionAnswering/contiguous_0"; +"1753 BertForQuestionAnswering/squeeze_1"; +"1754 BertForQuestionAnswering/contiguous_1"; +"1755 /nncf_model_output_0"; +"1756 /nncf_model_output_1"; +"0 /nncf_model_input_0" -> "5 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[word_embeddings]/embedding_0" [label="(1, 384)", style=dashed]; +"1 /nncf_model_input_1" -> "3 BertForQuestionAnswering/BertModel[bert]/__getitem___0" [label="(1, 384)", style=dashed]; +"2 /nncf_model_input_2" -> "8 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[token_type_embeddings]/embedding_0" [label="(1, 384)", style=dashed]; +"4 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[word_embeddings]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "5 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[word_embeddings]/embedding_0" [label="(30522, 1024)", style=solid]; +"5 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[word_embeddings]/embedding_0" -> "6 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[word_embeddings]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"6 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[word_embeddings]/SymmetricQuantizer/symmetric_quantize_0" -> "10 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/__add___0" [label="(1, 384, 1024)", style=solid]; +"7 
BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[token_type_embeddings]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "8 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[token_type_embeddings]/embedding_0" [label="(2, 1024)", style=solid]; +"8 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[token_type_embeddings]/embedding_0" -> "9 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[token_type_embeddings]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"9 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[token_type_embeddings]/SymmetricQuantizer/symmetric_quantize_0" -> "10 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/__add___0" [label="(1, 384, 1024)", style=solid]; +"10 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/__add___0" -> "11 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"11 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/SymmetricQuantizer/symmetric_quantize_0" -> "15 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/__iadd___0" [label="(1, 384, 1024)", style=solid]; +"12 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[position_embeddings]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "13 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[position_embeddings]/embedding_0" [label="(512, 1024)", style=solid]; +"13 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[position_embeddings]/embedding_0" -> "14 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[position_embeddings]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"14 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/NNCFEmbedding[position_embeddings]/SymmetricQuantizer/symmetric_quantize_0" -> "15 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/__iadd___0" [label="(1, 384, 1024)", style=solid]; +"15 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/__iadd___0" -> "16 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 384, 1024)", style=solid]; +"16 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/SymmetricQuantizer/symmetric_quantize_1" -> "17 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"17 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/LayerNorm[LayerNorm]/layer_norm_0" -> "18 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"18 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "19 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"19 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/Dropout[dropout]/dropout_0" -> "25 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"19 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/Dropout[dropout]/dropout_0" -> "32 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"19 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/Dropout[dropout]/dropout_0" -> "41 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"19 BertForQuestionAnswering/BertModel[bert]/BertEmbeddings[embeddings]/Dropout[dropout]/dropout_0" -> "68 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"20 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "22 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"21 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "23 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"22 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "24 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"23 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "25 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"24 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "25 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"25 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "26 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"26 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "45 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"27 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "29 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"28 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "30 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"29 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "31 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"30 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "32 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"31 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "32 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", 
style=solid]; +"32 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "33 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"33 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "34 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"34 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "35 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"35 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "47 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"36 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "38 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"37 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "39 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"38 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "40 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"39 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "41 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"40 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "41 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"41 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "42 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"42 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "43 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"43 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "44 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"44 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "55 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"45 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "46 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"46 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "48 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"47 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "48 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"48 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "49 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"49 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "50 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"50 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "51 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"51 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "52 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"52 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "53 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"53 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "54 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"54 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "55 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"55 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "56 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"56 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "57 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"57 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "58 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"58 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "59 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"59 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "65 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"60 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "62 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"61 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "63 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"62 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "64 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"63 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "65 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"64 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "65 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"65 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "66 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"66 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "67 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"67 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "68 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"68 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "69 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"69 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "70 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"70 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "71 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"71 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "77 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"71 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "88 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"72 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "74 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"73 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "75 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"74 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "76 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"75 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "77 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"76 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "77 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"77 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "78 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"78 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/gelu_0" -> "79 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"79 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "85 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"80 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "82 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"81 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "83 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"82 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "84 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"83 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "85 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"84 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "85 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"85 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "86 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"86 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "87 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"87 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "88 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"88 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/__add___0" -> "89 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"89 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "90 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"90 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "91 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"91 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "97 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"91 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "104 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"91 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "113 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"91 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[0]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "140 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"92 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "94 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"93 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "95 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"94 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "96 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"95 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "97 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"96 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "97 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"97 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "98 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"98 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "117 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"99 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "101 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"100 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "102 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"101 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "103 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"102 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "104 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"103 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "104 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"104 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "105 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"105 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "106 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"106 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "107 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"107 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "119 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"108 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "110 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"109 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> 
"111 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"110 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "112 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"111 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "113 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"112 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "113 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"113 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "114 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"114 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "115 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"115 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "116 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"116 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "127 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"117 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "118 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"118 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "120 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"119 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "120 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"120 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "121 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"121 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "122 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"122 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "123 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"123 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "124 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"124 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "125 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"125 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "126 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"126 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "127 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"127 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "128 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"128 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "129 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"129 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "130 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"130 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "131 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"131 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "137 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"132 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "134 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"133 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "135 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"134 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "136 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"135 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "137 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"136 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "137 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"137 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "138 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"138 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "139 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"139 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "140 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"140 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "141 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"141 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "142 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"142 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "143 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"143 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "149 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"143 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "160 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"144 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "146 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"145 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "147 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"146 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "148 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"147 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "149 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"148 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "149 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"149 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "150 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"150 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/gelu_0" -> "151 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"151 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "157 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"152 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "154 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"153 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "155 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"154 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "156 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"155 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "157 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"156 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "157 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"157 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "158 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"158 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "159 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"159 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "160 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"160 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/__add___0" -> "161 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"161 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "162 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"162 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "163 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"163 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "169 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"163 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "176 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"163 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "185 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"163 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[1]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "212 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"164 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "166 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"165 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "167 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"166 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "168 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"167 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "169 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"168 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "169 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"169 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "170 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"170 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "189 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"171 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "173 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"172 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "174 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"173 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "175 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"174 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "176 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"175 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "176 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"176 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "177 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"177 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "178 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"178 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "179 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"179 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "191 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"180 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "182 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"181 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "183 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"182 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "184 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"183 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "185 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"184 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "185 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"185 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "186 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"186 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "187 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"187 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "188 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"188 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "199 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"189 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "190 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"190 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "192 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"191 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "192 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"192 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "193 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"193 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "194 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"194 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "195 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"195 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "196 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"196 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "197 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"197 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "198 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"198 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "199 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"199 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "200 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"200 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "201 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"201 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "202 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"202 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "203 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"203 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "209 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"204 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "206 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"205 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "207 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"206 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "208 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"207 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "209 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"208 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "209 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"209 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "210 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"210 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "211 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"211 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "212 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"212 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "213 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"213 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "214 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"214 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "215 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"215 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "221 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"215 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "232 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"216 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "218 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"217 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "219 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"218 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "220 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"219 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "221 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"220 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "221 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"221 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "222 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"222 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/gelu_0" -> "223 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"223 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "229 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"224 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "226 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"225 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "227 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"226 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "228 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"227 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "229 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"228 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "229 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"229 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "230 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"230 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "231 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"231 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "232 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"232 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/__add___0" -> "233 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"233 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "234 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"234 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "235 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"235 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "241 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"235 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "248 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"235 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "257 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"235 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[2]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "284 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"236 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "238 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"237 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "239 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"238 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "240 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"239 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "241 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"240 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "241 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"241 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "242 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"242 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "261 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"243 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "245 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"244 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "246 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"245 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "247 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"246 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "248 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"247 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "248 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"248 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "249 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"249 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "250 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"250 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "251 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"251 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "263 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"252 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "254 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"253 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "255 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"254 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "256 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"255 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "257 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"256 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "257 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"257 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "258 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"258 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "259 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"259 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "260 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"260 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "271 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"261 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "262 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"262 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "264 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"263 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "264 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"264 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "265 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"265 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "266 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"266 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "267 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"267 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "268 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"268 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "269 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"269 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "270 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"270 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "271 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"271 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "272 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"272 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "273 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"273 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "274 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"274 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "275 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"275 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "281 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"276 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "278 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"277 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "279 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"278 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "280 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"279 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "281 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"280 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "281 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"281 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "282 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"282 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "283 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"283 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "284 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"284 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "285 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"285 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "286 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"286 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "287 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"287 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "293 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"287 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "304 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"288 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "290 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", 
style=solid]; +"289 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "291 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"290 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "292 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"291 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "293 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"292 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "293 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"293 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "294 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"294 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/gelu_0" -> "295 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"295 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "301 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"296 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "298 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"297 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "299 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"298 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "300 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"299 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "301 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"300 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "301 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"301 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "302 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"302 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "303 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"303 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "304 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"304 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/__add___0" -> "305 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"305 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "306 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"306 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "307 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"307 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "313 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"307 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "320 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"307 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "329 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"307 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[3]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "356 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"308 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "310 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"309 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "311 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"310 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "312 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 
1024)", style=solid]; +"311 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "313 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"312 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "313 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"313 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "314 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"314 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "333 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"315 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "317 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"316 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "318 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"317 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "319 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"318 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "320 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"319 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "320 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"320 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "321 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"321 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "322 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"322 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "323 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"323 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "335 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"324 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "326 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"325 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "327 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; 
+"326 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "328 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"327 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "329 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"328 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "329 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"329 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "330 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"330 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "331 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"331 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "332 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"332 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "343 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"333 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "334 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"334 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "336 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"335 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "336 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"336 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "337 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"337 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "338 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"338 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "339 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"339 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "340 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"340 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "341 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"341 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "342 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"342 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "343 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"343 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "344 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"344 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "345 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"345 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "346 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"346 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "347 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"347 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "353 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"348 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "350 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"349 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "351 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"350 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "352 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"351 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "353 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"352 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "353 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"353 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "354 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"354 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "355 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"355 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "356 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"356 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "357 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"357 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "358 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"358 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "359 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"359 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "365 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"359 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "376 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"360 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "362 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"361 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "363 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"362 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "364 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"363 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "365 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"364 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "365 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"365 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "366 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"366 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/gelu_0" -> "367 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"367 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "373 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"368 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "370 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"369 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "371 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"370 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "372 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"371 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "373 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"372 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "373 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"373 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "374 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"374 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "375 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"375 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "376 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"376 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/__add___0" -> "377 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"377 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "378 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"378 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "379 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"379 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "385 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"379 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "392 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"379 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "401 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"379 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[4]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "428 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"380 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "382 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"381 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "383 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"382 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "384 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"383 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "385 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"384 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "385 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"385 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "386 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"386 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "405 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"387 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "389 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"388 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "390 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"389 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "391 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"390 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "392 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"391 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "392 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"392 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "393 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"393 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "394 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"394 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "395 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"395 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "407 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"396 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "398 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"397 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "399 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"398 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "400 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"399 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "401 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"400 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "401 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"401 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "402 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"402 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> 
"403 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"403 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "404 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"404 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "415 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"405 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "406 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"406 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "408 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"407 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "408 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"408 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "409 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"409 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "410 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"410 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "411 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"411 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "412 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"412 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "413 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"413 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "414 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"414 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "415 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"415 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "416 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"416 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "417 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"417 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "418 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"418 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "419 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"419 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "425 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"420 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "422 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"421 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "423 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"422 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "424 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"423 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "425 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"424 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "425 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"425 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "426 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"426 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "427 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"427 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "428 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"428 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "429 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"429 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "430 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"430 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "431 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"431 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "437 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"431 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "448 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"432 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "434 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"433 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "435 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"434 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "436 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"435 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "437 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"436 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "437 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"437 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "438 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"438 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/gelu_0" -> "439 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"439 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "445 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"440 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "442 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"441 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "443 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"442 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "444 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"443 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "445 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"444 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "445 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"445 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "446 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"446 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "447 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"447 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "448 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"448 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/__add___0" -> "449 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"449 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "450 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"450 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "451 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"451 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "457 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"451 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "464 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"451 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "473 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"451 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[5]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "500 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"452 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "454 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"453 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "455 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"454 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "456 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"455 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "457 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"456 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "457 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"457 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "458 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"458 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "477 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"459 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "461 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"460 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "462 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"461 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "463 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"462 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "464 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"463 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "464 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"464 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "465 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"465 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "466 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"466 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "467 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"467 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "479 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"468 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "470 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"469 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "471 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"470 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "472 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"471 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "473 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"472 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "473 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"473 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "474 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"474 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "475 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"475 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "476 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"476 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "487 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"477 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "478 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"478 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "480 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"479 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "480 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"480 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "481 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"481 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "482 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"482 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "483 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"483 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "484 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"484 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "485 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"485 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "486 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"486 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "487 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"487 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "488 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"488 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "489 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"489 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "490 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"490 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "491 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"491 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "497 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"492 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "494 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"493 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "495 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"494 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "496 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"495 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "497 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"496 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "497 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"497 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "498 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"498 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "499 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"499 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "500 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"500 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "501 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"501 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "502 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"502 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "503 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"503 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "509 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"503 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "520 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"504 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "506 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", 
style=solid]; +"505 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "507 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"506 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "508 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"507 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "509 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"508 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "509 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"509 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "510 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"510 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/gelu_0" -> "511 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"511 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "517 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"512 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "514 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"513 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "515 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"514 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "516 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"515 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "517 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"516 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "517 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"517 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "518 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"518 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "519 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"519 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "520 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"520 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/__add___0" -> "521 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"521 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "522 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"522 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "523 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"523 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "529 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"523 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "536 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"523 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "545 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"523 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[6]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "572 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"524 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "526 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"525 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "527 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"526 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "528 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 
1024)", style=solid]; +"527 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "529 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"528 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "529 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"529 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "530 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"530 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "549 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"531 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "533 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"532 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "534 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"533 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "535 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"534 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "536 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"535 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "536 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"536 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "537 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"537 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "538 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"538 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "539 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"539 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "551 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"540 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "542 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"541 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "543 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; 
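+// Layers 7 and 8 follow the same compressed pattern as layer 6: each NNCFLinear
+// weight passes through its MovementSparsifier (binary_mask_by_threshold ->
+// apply_binary_mask on the (out, in) weight and the (out,) bias) and then a
+// SymmetricQuantizer before linear_0; activations are fake-quantized after the
+// query/key/value and dense projections, after each residual __add__, LayerNorm,
+// gelu and softmax. Self-attention runs 16 heads of size 64 over 384 tokens
+// (hidden size 1024, intermediate size 4096).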
+"542 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "544 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"543 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "545 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"544 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "545 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"545 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "546 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"546 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "547 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"547 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "548 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"548 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "559 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"549 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "550 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"550 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "552 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"551 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "552 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"552 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "553 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"553 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "554 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"554 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "555 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"555 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "556 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"556 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "557 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"557 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "558 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"558 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "559 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"559 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "560 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"560 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "561 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"561 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "562 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"562 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "563 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"563 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "569 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"564 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "566 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"565 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "567 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"566 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "568 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"567 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "569 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"568 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "569 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"569 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "570 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"570 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "571 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"571 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "572 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"572 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "573 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"573 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "574 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"574 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "575 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"575 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "581 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"575 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "592 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"576 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "578 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"577 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "579 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"578 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "580 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"579 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "581 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"580 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "581 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"581 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "582 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"582 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/gelu_0" -> "583 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"583 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "589 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"584 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "586 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"585 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "587 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"586 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "588 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"587 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "589 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"588 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "589 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"589 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "590 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"590 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "591 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"591 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "592 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"592 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/__add___0" -> "593 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"593 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "594 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"594 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "595 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"595 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "601 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"595 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "608 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"595 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "617 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"595 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[7]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "644 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"596 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "598 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"597 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "599 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"598 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "600 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"599 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "601 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"600 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "601 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"601 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "602 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"602 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "621 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"603 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "605 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"604 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "606 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"605 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "607 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"606 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "608 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"607 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "608 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"608 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "609 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"609 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "610 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"610 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "611 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"611 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "623 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"612 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "614 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"613 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "615 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"614 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "616 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"615 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "617 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"616 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "617 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"617 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "618 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"618 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> 
"619 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"619 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "620 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"620 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "631 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"621 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "622 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"622 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "624 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"623 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "624 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"624 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "625 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"625 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "626 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"626 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "627 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"627 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "628 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"628 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "629 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"629 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "630 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"630 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "631 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"631 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "632 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"632 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "633 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"633 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "634 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"634 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "635 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"635 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "641 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"636 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "638 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"637 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "639 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"638 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "640 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"639 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "641 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"640 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "641 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"641 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "642 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"642 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "643 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"643 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "644 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"644 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "645 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"645 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "646 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"646 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "647 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"647 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "653 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"647 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "664 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"648 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "650 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"649 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "651 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"650 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "652 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"651 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "653 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"652 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "653 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"653 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "654 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"654 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/gelu_0" -> "655 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"655 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "661 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"656 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "658 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"657 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "659 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"658 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "660 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"659 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "661 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"660 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "661 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"661 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "662 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"662 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "663 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"663 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "664 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"664 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/__add___0" -> "665 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"665 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "666 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"666 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "667 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"667 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "673 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"667 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "680 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"667 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "689 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"667 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[8]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "716 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"668 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "670 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"669 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "671 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"670 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "672 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"671 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "673 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"672 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "673 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"673 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "674 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"674 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "693 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"675 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "677 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"676 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "678 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"677 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "679 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"678 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "680 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"679 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "680 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"680 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "681 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"681 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "682 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"682 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "683 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"683 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "695 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"684 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "686 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"685 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "687 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"686 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "688 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"687 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "689 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"688 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "689 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"689 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "690 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"690 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "691 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"691 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "692 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"692 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "703 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"693 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "694 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"694 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "696 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"695 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "696 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"696 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "697 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"697 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "698 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"698 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "699 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"699 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "700 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"700 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "701 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"701 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "702 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"702 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "703 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"703 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "704 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"704 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "705 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"705 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "706 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"706 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "707 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"707 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "713 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"708 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "710 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"709 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "711 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"710 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "712 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"711 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "713 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"712 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "713 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"713 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "714 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"714 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "715 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"715 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "716 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"716 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "717 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"717 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "718 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"718 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "719 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"719 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "725 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"719 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "736 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"720 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "722 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", 
style=solid]; +"721 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "723 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"722 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "724 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"723 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "725 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"724 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "725 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"725 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "726 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"726 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/gelu_0" -> "727 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"727 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "733 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"728 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "730 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"729 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "731 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"730 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "732 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"731 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "733 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"732 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "733 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"733 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "734 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"734 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "735 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"735 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "736 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"736 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/__add___0" -> "737 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"737 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "738 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"738 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "739 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"739 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "745 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"739 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "752 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"739 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "761 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"739 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[9]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "788 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"740 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "742 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"741 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "743 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"742 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "744 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" 
[label="(1024, 1024)", style=solid]; +"743 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "745 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"744 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "745 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"745 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "746 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"746 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "765 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"747 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "749 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"748 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "750 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"749 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "751 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"750 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "752 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"751 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "752 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"752 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "753 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"753 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "754 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"754 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "755 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"755 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "767 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"756 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "758 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"757 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "759 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", 
style=solid]; +"758 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "760 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"759 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "761 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"760 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "761 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"761 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "762 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"762 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "763 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"763 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "764 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"764 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "775 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"765 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "766 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"766 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "768 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"767 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "768 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"768 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "769 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"769 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "770 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"770 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "771 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"771 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "772 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"772 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "773 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"773 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "774 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"774 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "775 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"775 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "776 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"776 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "777 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"777 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "778 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"778 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "779 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"779 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "785 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"780 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "782 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"781 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "783 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"782 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "784 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"783 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "785 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"784 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "785 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"785 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "786 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"786 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "787 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"787 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "788 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"788 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "789 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"789 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "790 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"790 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "791 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"791 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "797 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"791 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "808 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"792 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "794 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"793 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "795 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"794 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "796 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"795 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "797 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"796 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "797 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"797 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "798 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"798 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/gelu_0" -> "799 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"799 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "805 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"800 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "802 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"801 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "803 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"802 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "804 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"803 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "805 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"804 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "805 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"805 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "806 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"806 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "807 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"807 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "808 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"808 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/__add___0" -> "809 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"809 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "810 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"810 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "811 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"811 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "817 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"811 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "824 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"811 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "833 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"811 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[10]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "860 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"812 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "814 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"813 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "815 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"814 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "816 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"815 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "817 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"816 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "817 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"817 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "818 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"818 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "837 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"819 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "821 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"820 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "822 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"821 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "823 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"822 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "824 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"823 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "824 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"824 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "825 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"825 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "826 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"826 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "827 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"827 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "839 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"828 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "830 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"829 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "831 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"830 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "832 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"831 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "833 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"832 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "833 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"833 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "834 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"834 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "835 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"835 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "836 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"836 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "847 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"837 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "838 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"838 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "840 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"839 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "840 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"840 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "841 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"841 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "842 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"842 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "843 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"843 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "844 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"844 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "845 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"845 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "846 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"846 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "847 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"847 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "848 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"848 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "849 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"849 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "850 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"850 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "851 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"851 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "857 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"852 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "854 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"853 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "855 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"854 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "856 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"855 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "857 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"856 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "857 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"857 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "858 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"858 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "859 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"859 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "860 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"860 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "861 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"861 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "862 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"862 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "863 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"863 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "869 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"863 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "880 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"864 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "866 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"865 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "867 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"866 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "868 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"867 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "869 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"868 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "869 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"869 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "870 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"870 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/gelu_0" -> "871 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"871 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "877 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"872 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "874 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"873 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "875 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"874 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "876 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"875 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "877 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"876 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "877 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"877 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "878 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"878 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "879 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"879 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "880 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"880 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/__add___0" -> "881 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"881 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "882 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"882 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "883 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"883 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "889 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"883 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "896 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"883 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "905 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"883 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[11]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "932 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"884 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "886 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"885 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "887 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"886 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "888 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"887 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "889 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"888 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "889 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"889 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "890 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"890 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "909 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"891 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "893 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"892 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "894 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"893 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "895 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"894 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "896 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"895 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "896 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"896 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "897 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"897 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "898 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"898 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "899 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"899 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "911 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"900 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "902 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"901 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "903 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"902 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "904 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"903 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "905 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"904 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "905 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"905 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "906 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"906 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "907 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"907 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "908 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"908 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "919 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"909 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "910 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"910 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "912 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"911 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "912 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"912 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "913 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"913 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "914 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"914 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "915 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"915 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "916 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"916 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "917 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"917 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "918 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"918 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "919 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"919 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "920 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"920 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "921 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"921 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "922 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"922 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "923 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"923 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "929 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"924 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "926 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"925 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "927 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"926 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "928 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"927 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "929 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"928 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "929 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"929 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "930 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"930 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "931 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"931 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "932 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"932 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "933 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"933 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "934 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"934 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "935 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"935 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "941 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"935 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "952 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"936 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "938 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"937 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "939 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"938 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "940 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"939 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "941 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"940 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "941 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"941 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "942 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"942 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/gelu_0" -> "943 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"943 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "949 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"944 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "946 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"945 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "947 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"946 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "948 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"947 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "949 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"948 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "949 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"949 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "950 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"950 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "951 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"951 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "952 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"952 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/__add___0" -> "953 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"953 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "954 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"954 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "955 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"955 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "961 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"955 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "968 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"955 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "977 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"955 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[12]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1004 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"956 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "958 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"957 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "959 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"958 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "960 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"959 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "961 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"960 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "961 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"961 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "962 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"962 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "981 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"963 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "965 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"964 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "966 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"965 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "967 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"966 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "968 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"967 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "968 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"968 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "969 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"969 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "970 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"970 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "971 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"971 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "983 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"972 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "974 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"973 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "975 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"974 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "976 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"975 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "977 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"976 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "977 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"977 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "978 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"978 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "979 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"979 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "980 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"980 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "991 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"981 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "982 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"982 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "984 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"983 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "984 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"984 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "985 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"985 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "986 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"986 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "987 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"987 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "988 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"988 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "989 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"989 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "990 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"990 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "991 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"991 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "992 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"992 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "993 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"993 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "994 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"994 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "995 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"995 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1001 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"996 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "998 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"997 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "999 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"998 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1000 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"999 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1001 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1000 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1001 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1001 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1002 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1002 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1003 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1003 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1004 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1004 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1005 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1005 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1006 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1006 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1007 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1007 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1013 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1007 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1024 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1008 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1010 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1009 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1011 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1010 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1012 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1011 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1013 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1012 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1013 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1013 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1014 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1014 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/gelu_0" -> "1015 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1015 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1021 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1016 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1018 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1017 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1019 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1018 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1020 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1019 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1021 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1020 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1021 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1021 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1022 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1022 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1023 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1023 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1024 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1024 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/__add___0" -> "1025 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1025 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1026 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1026 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1027 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1027 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1033 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1027 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1040 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1027 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1049 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1027 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[13]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1076 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1028 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1030 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1029 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1031 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1030 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1032 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1031 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1033 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"1032 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1033 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"1033 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "1034 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1034 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "1053 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"1035 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1037 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1036 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1038 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1037 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1039 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1038 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1040 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"1039 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1040 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"1040 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "1041 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1041 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "1042 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"1042 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "1043 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"1043 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "1055 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"1044 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1046 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1045 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1047 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1046 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1048 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1047 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1049 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"1048 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1049 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"1049 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "1050 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1050 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "1051 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"1051 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "1052 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"1052 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "1063 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"1053 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "1054 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"1054 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_2" 
-> "1056 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"1055 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "1056 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"1056 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "1057 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"1057 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "1058 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1058 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "1059 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"1059 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "1060 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"1060 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "1061 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1061 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "1062 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"1062 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "1063 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"1063 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "1064 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"1064 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "1065 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"1065 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "1066 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"1066 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "1067 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"1067 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1073 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1068 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1070 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1069 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1071 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1070 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1072 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1071 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1073 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1072 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1073 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1073 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1074 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1074 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1075 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1075 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1076 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1076 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1077 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1077 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1078 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1078 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1079 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1079 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1085 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1079 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1096 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1080 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1082 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1081 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1083 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1082 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1084 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1083 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1085 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1084 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1085 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1085 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1086 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1086 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/gelu_0" -> "1087 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1087 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1093 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1088 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1090 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1089 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1091 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1090 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1092 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1091 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1093 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1092 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1093 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1093 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1094 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1094 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1095 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1095 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1096 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1096 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/__add___0" -> "1097 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1097 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1098 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1098 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1099 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1099 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1105 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1099 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1112 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1099 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1121 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1099 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[14]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1148 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1100 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1102 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1101 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1103 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1102 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1104 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1103 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1105 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"1104 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1105 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"1105 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "1106 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1106 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "1125 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"1107 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1109 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1108 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1110 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1109 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1111 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1110 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1112 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"1111 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1112 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"1112 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "1113 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1113 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "1114 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"1114 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "1115 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"1115 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "1127 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"1116 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1118 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1117 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1119 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1118 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1120 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1119 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1121 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"1120 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1121 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"1121 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "1122 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1122 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "1123 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"1123 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "1124 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"1124 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "1135 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"1125 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "1126 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"1126 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "1128 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"1127 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "1128 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"1128 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "1129 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"1129 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "1130 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1130 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "1131 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"1131 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "1132 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"1132 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "1133 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1133 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "1134 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"1134 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "1135 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"1135 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "1136 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"1136 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "1137 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"1137 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "1138 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"1138 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "1139 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"1139 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1145 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1140 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1142 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1141 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1143 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1142 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1144 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1143 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1145 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1144 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1145 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1145 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1146 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1146 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1147 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1147 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1148 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1148 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1149 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1149 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1150 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1150 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1151 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1151 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1157 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1151 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1168 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1152 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1154 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1153 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1155 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1154 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1156 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1155 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1157 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1156 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1157 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1157 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1158 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1158 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/gelu_0" -> "1159 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1159 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1165 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1160 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1162 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1161 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1163 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1162 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1164 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1163 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1165 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1164 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1165 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1165 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1166 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1166 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1167 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1167 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1168 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1168 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/__add___0" -> "1169 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1169 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1170 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1170 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1171 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1171 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1177 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1171 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1184 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1171 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1193 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1171 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[15]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1220 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1172 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1174 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1173 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1175 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1174 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1176 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1175 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1177 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"1176 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1177 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"1177 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "1178 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1178 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "1197 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"1179 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1181 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1180 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1182 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1181 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1183 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1182 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1184 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"1183 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1184 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"1184 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "1185 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1185 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "1186 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"1186 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "1187 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"1187 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "1199 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"1188 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1190 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1189 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1191 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1190 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1192 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1191 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1193 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"1192 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1193 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"1193 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "1194 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1194 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "1195 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"1195 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "1196 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"1196 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "1207 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"1197 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "1198 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"1198 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_2" 
-> "1200 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"1199 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "1200 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"1200 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "1201 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"1201 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "1202 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1202 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "1203 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"1203 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "1204 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"1204 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "1205 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1205 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "1206 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"1206 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "1207 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"1207 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "1208 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"1208 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "1209 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"1209 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "1210 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"1210 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "1211 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"1211 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1217 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1212 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1214 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1213 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1215 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1214 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1216 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1215 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1217 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1216 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1217 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1217 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1218 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1218 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1219 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1219 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1220 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1220 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1221 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1221 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1222 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1222 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1223 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1223 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1229 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1223 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1240 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1224 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1226 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1225 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1227 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1226 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1228 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1227 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1229 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1228 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1229 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1229 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1230 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1230 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/gelu_0" -> "1231 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1231 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1237 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1232 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1234 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1233 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1235 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1234 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1236 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1235 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1237 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1236 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1237 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1237 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1238 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1238 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1239 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1239 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1240 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1240 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/__add___0" -> "1241 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1241 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1242 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1242 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1243 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1243 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1249 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1243 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1256 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1243 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1265 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1243 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[16]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1292 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1244 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1246 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1245 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1247 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1246 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1248 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1247 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1249 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"1248 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1249 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"1249 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "1250 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1250 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "1269 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"1251 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1253 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1252 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1254 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1253 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1255 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1254 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1256 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"1255 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1256 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"1256 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "1257 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1257 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "1258 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"1258 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "1259 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"1259 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "1271 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"1260 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1262 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1261 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1263 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1262 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1264 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1263 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1265 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"1264 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1265 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"1265 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "1266 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1266 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "1267 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"1267 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "1268 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"1268 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "1279 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"1269 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "1270 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"1270 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "1272 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"1271 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "1272 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"1272 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "1273 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"1273 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "1274 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1274 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "1275 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"1275 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "1276 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"1276 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "1277 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1277 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "1278 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"1278 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "1279 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"1279 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "1280 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"1280 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "1281 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"1281 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "1282 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"1282 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "1283 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"1283 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1289 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1284 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1286 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1285 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1287 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1286 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1288 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1287 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1289 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1288 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1289 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1289 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1290 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1290 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1291 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1291 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1292 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1292 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1293 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1293 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1294 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1294 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1295 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1295 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1301 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1295 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1312 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1296 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1298 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1297 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1299 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1298 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1300 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1299 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1301 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1300 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1301 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1301 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1302 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1302 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/gelu_0" -> "1303 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1303 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1309 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1304 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1306 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1305 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1307 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1306 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1308 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1307 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1309 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1308 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1309 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1309 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1310 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1310 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1311 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1311 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1312 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1312 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/__add___0" -> "1313 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1313 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1314 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1314 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1315 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1315 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1321 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1315 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1328 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1315 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1337 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1315 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[17]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1364 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1316 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1318 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1317 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1319 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1318 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1320 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1319 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1321 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"1320 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1321 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"1321 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "1322 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1322 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "1341 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"1323 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1325 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1324 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1326 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1325 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1327 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1326 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1328 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"1327 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1328 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"1328 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "1329 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1329 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "1330 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"1330 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "1331 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"1331 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "1343 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"1332 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1334 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1333 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1335 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1334 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1336 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1335 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1337 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"1336 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1337 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"1337 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "1338 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1338 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "1339 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"1339 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "1340 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"1340 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "1351 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"1341 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "1342 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"1342 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_2" 
-> "1344 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"1343 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "1344 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"1344 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "1345 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"1345 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "1346 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1346 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "1347 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"1347 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "1348 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"1348 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "1349 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1349 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "1350 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"1350 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "1351 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"1351 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "1352 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"1352 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "1353 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"1353 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "1354 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"1354 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "1355 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"1355 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1361 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1356 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1358 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1357 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1359 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1358 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1360 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1359 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1361 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1360 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1361 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1361 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1362 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1362 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1363 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1363 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1364 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1364 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1365 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1365 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1366 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1366 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1367 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1367 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1373 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1367 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1384 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1368 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1370 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1369 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1371 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1370 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1372 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1371 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1373 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1372 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1373 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1373 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1374 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1374 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/gelu_0" -> "1375 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1375 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1381 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1376 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1378 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1377 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1379 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1378 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1380 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1379 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1381 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1380 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1381 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1381 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1382 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1382 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1383 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1383 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1384 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1384 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/__add___0" -> "1385 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1385 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1386 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1386 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1387 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1387 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1393 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1387 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1400 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1387 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1409 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1387 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[18]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1436 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1388 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1390 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1389 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1391 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1390 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1392 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1391 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1393 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"1392 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1393 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"1393 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "1394 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1394 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "1413 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"1395 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1397 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1396 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1398 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1397 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1399 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1398 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1400 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"1399 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1400 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"1400 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "1401 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1401 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "1402 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"1402 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "1403 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"1403 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "1415 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"1404 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1406 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1405 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1407 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1406 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1408 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1407 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1409 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"1408 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1409 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"1409 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "1410 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1410 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "1411 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"1411 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "1412 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"1412 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "1423 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"1413 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "1414 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"1414 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "1416 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"1415 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "1416 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"1416 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "1417 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"1417 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "1418 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1418 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "1419 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"1419 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "1420 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"1420 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "1421 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1421 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "1422 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"1422 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "1423 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"1423 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "1424 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"1424 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "1425 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"1425 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "1426 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"1426 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "1427 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"1427 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1433 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1428 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1430 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1429 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1431 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1430 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1432 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1431 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1433 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1432 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1433 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1433 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1434 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1434 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1435 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1435 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1436 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1436 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1437 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1437 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1438 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1438 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1439 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1439 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1445 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1439 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1456 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1440 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1442 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1441 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1443 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1442 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1444 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1443 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1445 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1444 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1445 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1445 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1446 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1446 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/gelu_0" -> "1447 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1447 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1453 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1448 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1450 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1449 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1451 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1450 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1452 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1451 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1453 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1452 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1453 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1453 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1454 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1454 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1455 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1455 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1456 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1456 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/__add___0" -> "1457 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1457 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1458 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1458 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1459 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1459 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1465 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1459 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1472 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1459 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1481 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1459 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[19]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1508 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1460 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1462 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1461 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1463 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1462 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1464 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1463 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1465 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"1464 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1465 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"1465 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "1466 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1466 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "1485 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"1467 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1469 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1468 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1470 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1469 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1471 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1470 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1472 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"1471 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1472 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"1472 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "1473 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1473 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "1474 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"1474 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "1475 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"1475 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "1487 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"1476 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1478 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1477 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1479 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1478 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1480 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1479 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1481 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"1480 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1481 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"1481 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "1482 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1482 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "1483 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"1483 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "1484 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"1484 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "1495 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"1485 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "1486 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"1486 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_2" 
-> "1488 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"1487 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "1488 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"1488 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "1489 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"1489 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "1490 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1490 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "1491 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"1491 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "1492 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"1492 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "1493 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1493 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "1494 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"1494 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "1495 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"1495 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "1496 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"1496 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "1497 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"1497 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "1498 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"1498 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "1499 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"1499 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1505 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1500 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1502 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1501 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1503 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1502 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1504 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1503 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1505 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1504 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1505 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1505 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1506 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1506 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1507 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1507 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1508 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1508 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1509 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1509 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1510 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1510 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1511 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1511 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1517 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1511 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1528 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1512 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1514 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1513 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1515 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1514 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1516 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1515 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1517 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1516 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1517 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1517 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1518 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1518 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/gelu_0" -> "1519 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1519 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1525 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1520 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1522 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1521 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1523 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1522 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1524 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1523 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1525 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1524 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1525 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1525 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1526 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1526 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1527 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1527 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1528 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1528 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/__add___0" -> "1529 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1529 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1530 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1530 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1531 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1531 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1537 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1531 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1544 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1531 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1553 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1531 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[20]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1580 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1532 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1534 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1533 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1535 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1534 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1536 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1535 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1537 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"1536 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1537 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"1537 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "1538 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1538 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "1557 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"1539 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1541 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1540 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1542 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1541 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1543 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1542 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1544 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"1543 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1544 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"1544 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "1545 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1545 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "1546 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"1546 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "1547 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"1547 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "1559 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"1548 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1550 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1549 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1551 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1550 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1552 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1551 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1553 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"1552 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1553 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"1553 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "1554 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1554 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "1555 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"1555 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "1556 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"1556 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "1567 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"1557 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "1558 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"1558 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "1560 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"1559 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "1560 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"1560 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "1561 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"1561 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "1562 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1562 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "1563 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"1563 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "1564 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"1564 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "1565 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1565 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "1566 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"1566 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "1567 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"1567 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "1568 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"1568 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "1569 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"1569 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "1570 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"1570 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "1571 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"1571 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1577 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1572 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1574 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1573 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1575 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1574 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1576 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1575 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1577 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1576 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1577 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1577 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1578 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1578 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1579 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1579 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1580 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1580 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1581 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1581 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1582 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1582 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1583 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1583 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1589 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1583 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1600 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1584 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1586 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1585 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1587 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1586 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1588 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1587 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1589 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1588 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1589 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1589 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1590 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1590 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/gelu_0" -> "1591 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1591 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1597 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1592 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1594 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1593 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1595 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1594 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1596 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1595 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1597 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1596 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1597 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1597 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1598 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1598 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1599 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1599 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1600 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1600 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/__add___0" -> "1601 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1601 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1602 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1602 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1603 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1603 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1609 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1603 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1616 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1603 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1625 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1603 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[21]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1652 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1604 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1606 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1605 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1607 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1606 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1608 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1607 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1609 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"1608 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1609 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"1609 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "1610 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1610 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "1629 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"1611 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1613 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1612 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1614 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1613 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1615 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1614 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1616 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"1615 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1616 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"1616 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "1617 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1617 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "1618 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"1618 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "1619 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"1619 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "1631 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"1620 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1622 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1621 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1623 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1622 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1624 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1623 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1625 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"1624 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1625 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"1625 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "1626 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1626 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "1627 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"1627 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "1628 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"1628 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "1639 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"1629 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "1630 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"1630 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_2" 
-> "1632 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"1631 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "1632 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"1632 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "1633 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"1633 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "1634 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1634 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "1635 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"1635 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "1636 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"1636 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "1637 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1637 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "1638 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"1638 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "1639 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"1639 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "1640 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"1640 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "1641 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"1641 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "1642 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"1642 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "1643 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"1643 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1649 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1644 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1646 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1645 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1647 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1646 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1648 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1647 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1649 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1648 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1649 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1649 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1650 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1650 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1651 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1651 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1652 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1652 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1653 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1653 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1654 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1654 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1655 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1655 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1661 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1655 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1672 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1656 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1658 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1657 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1659 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1658 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1660 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1659 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1661 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1660 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1661 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1661 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1662 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1662 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/gelu_0" -> "1663 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1663 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1669 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1664 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1666 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1665 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1667 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1666 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1668 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1667 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1669 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1668 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1669 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1669 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1670 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1670 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1671 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1671 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1672 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1672 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/__add___0" -> "1673 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1673 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1674 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1674 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1675 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1675 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1681 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1675 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1688 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1675 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1697 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1675 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[22]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1724 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1676 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1678 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1677 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1679 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1678 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1680 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1679 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1681 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024,)", style=solid]; +"1680 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1681 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" [label="(1024, 1024)", style=solid]; +"1681 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/linear_0" -> "1682 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1682 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[query]/SymmetricQuantizer/symmetric_quantize_0" -> "1701 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_2" [label="(1, 384, 1024)", style=solid]; +"1683 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1685 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1684 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1686 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1685 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1687 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1686 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1688 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024,)", style=solid]; +"1687 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1688 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" [label="(1024, 1024)", style=solid]; +"1688 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/linear_0" -> "1689 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1689 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[key]/SymmetricQuantizer/symmetric_quantize_0" -> "1690 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_0" [label="(1, 384, 1024)", style=solid]; +"1690 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_0" -> "1691 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_0" [label="(1, 384, 16, 64)", style=solid]; +"1691 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_0" -> "1703 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" [label="(1, 16, 384, 64)", style=solid]; +"1692 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1694 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1693 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1695 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1694 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1696 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1695 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1697 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024,)", style=solid]; +"1696 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1697 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" [label="(1024, 1024)", style=solid]; +"1697 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/linear_0" -> "1698 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1698 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/NNCFLinear[value]/SymmetricQuantizer/symmetric_quantize_0" -> "1699 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_1" [label="(1, 384, 1024)", style=solid]; +"1699 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_1" -> "1700 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_1" [label="(1, 384, 16, 64)", style=solid]; +"1700 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_1" -> "1711 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 64)", style=solid]; +"1701 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_2" -> "1702 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_2" [label="(1, 384, 16, 64)", style=solid]; +"1702 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_2" -> "1704 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 384, 64)", style=solid]; +"1703 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/transpose_0" -> "1704 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" [label="(1, 16, 64, 384)", style=solid]; +"1704 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/matmul_0" -> "1705 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" [label="(1, 16, 384, 384)", style=solid]; +"1705 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/__truediv___0" -> "1706 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1706 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_0" -> "1707 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/__add___0" [label="(1, 16, 384, 384)", style=solid]; +"1707 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/__add___0" -> "1708 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" [label="(1, 16, 384, 384)", style=solid]; +"1708 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/Softmax/softmax_0" -> "1709 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 16, 384, 384)", style=solid]; +"1709 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/Softmax/SymmetricQuantizer/symmetric_quantize_0" -> "1710 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" [label="(1, 16, 384, 384)", style=solid]; +"1710 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/Dropout[dropout]/dropout_0" -> "1711 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" [label="(1, 16, 384, 384)", style=solid]; +"1711 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/matmul_1" -> "1712 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" [label="(1, 16, 384, 64)", style=solid]; +"1712 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/SymmetricQuantizer/symmetric_quantize_1" -> "1713 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_3" [label="(1, 16, 384, 64)", style=solid]; +"1713 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/permute_3" -> "1714 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" [label="(1, 384, 16, 64)", style=solid]; +"1714 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/contiguous_0" -> "1715 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_3" [label="(1, 384, 16, 64)", style=solid]; +"1715 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfAttention[self]/view_3" -> "1721 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1716 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1718 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 1024)", style=solid]; +"1717 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1719 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1718 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1720 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 1024)", style=solid]; +"1719 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1721 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1720 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1721 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 1024)", style=solid]; +"1721 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/linear_0" -> "1722 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1722 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1723 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1723 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/Dropout[dropout]/dropout_0" -> "1724 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1724 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/__add___0" -> "1725 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1725 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1726 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1726 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1727 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1727 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1733 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1727 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertAttention[attention]/BertSelfOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1744 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1728 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1730 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(4096, 1024)", style=solid]; +"1729 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1731 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(4096,)", style=solid]; +"1730 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1732 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(4096, 1024)", style=solid]; +"1731 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1733 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096,)", style=solid]; +"1732 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1733 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" [label="(4096, 1024)", style=solid]; +"1733 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/NNCFLinear[dense]/linear_0" -> "1734 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/gelu_0" [label="(1, 384, 4096)", style=solid]; +"1734 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/gelu_0" -> "1735 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 4096)", style=solid]; +"1735 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertIntermediate[intermediate]/SymmetricQuantizer/symmetric_quantize_0" -> "1741 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1, 384, 4096)", style=solid]; +"1736 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_0" -> "1738 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" [label="(1024, 4096)", style=solid]; +"1737 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/binary_mask_by_threshold_1" -> "1739 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" [label="(1024,)", style=solid]; +"1738 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_0" -> "1740 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" [label="(1024, 4096)", style=solid]; +"1739 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeightAndBias[0]/MovementSparsifier[op]/apply_binary_mask_1" -> "1741 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024,)", style=solid]; +"1740 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/ModuleDict[pre_ops]/UpdateWeight[1]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1741 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/linear_0" [label="(1024, 4096)", style=solid]; +"1741 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/linear_0" -> "1742 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1742 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/NNCFLinear[dense]/SymmetricQuantizer/symmetric_quantize_0" -> "1743 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/Dropout[dropout]/dropout_0" [label="(1, 384, 1024)", style=solid]; +"1743 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/Dropout[dropout]/dropout_0" -> "1744 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/__add___0" [label="(1, 384, 1024)", style=solid]; +"1744 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/__add___0" -> "1745 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1745 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/SymmetricQuantizer/symmetric_quantize_0" -> "1746 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" [label="(1, 384, 1024)", style=solid]; +"1746 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/LayerNorm[LayerNorm]/layer_norm_0" -> "1747 BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" [label="(1, 384, 1024)", style=solid]; +"1747 
BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/ModuleList[layer]/BertLayer[23]/BertOutput[output]/LayerNorm[LayerNorm]/SymmetricQuantizer/symmetric_quantize_0" -> "1749 BertForQuestionAnswering/NNCFLinear[qa_outputs]/linear_0" [label="(1, 384, 1024)", style=solid]; +"1748 BertForQuestionAnswering/NNCFLinear[qa_outputs]/ModuleDict[pre_ops]/UpdateWeight[0]/SymmetricQuantizer[op]/symmetric_quantize_0" -> "1749 BertForQuestionAnswering/NNCFLinear[qa_outputs]/linear_0" [label="(2, 1024)", style=solid]; +"1749 BertForQuestionAnswering/NNCFLinear[qa_outputs]/linear_0" -> "1750 BertForQuestionAnswering/split_0" [label="(1, 384, 2)", style=solid]; +"1750 BertForQuestionAnswering/split_0" -> "1751 BertForQuestionAnswering/squeeze_0" [label="(1, 384, 1)", style=solid]; +"1750 BertForQuestionAnswering/split_0" -> "1753 BertForQuestionAnswering/squeeze_1" [label="(1, 384, 1)", style=solid]; +"1751 BertForQuestionAnswering/squeeze_0" -> "1752 BertForQuestionAnswering/contiguous_0" [label="(1, 384)", style=solid]; +"1752 BertForQuestionAnswering/contiguous_0" -> "1755 /nncf_model_output_0" [label="(1, 384)", style=solid]; +"1753 BertForQuestionAnswering/squeeze_1" -> "1754 BertForQuestionAnswering/contiguous_1" [label="(1, 384)", style=solid]; +"1754 BertForQuestionAnswering/contiguous_1" -> "1756 /nncf_model_output_1" [label="(1, 384)", style=solid]; +}