七百年後 committed
Commit · 61cdd1a
Parent(s): caea937
Add new file

This view is limited to 50 files because it contains too many changes.
- .gitattributes +1 -0
- TreeTest.py +13 -0
- analyze_data.py +23 -0
- bertviz_test.py +23 -0
- chinese_wwm_pytorch/bert_config.json +19 -0
- chinese_wwm_pytorch/config.json +20 -0
- chinese_wwm_pytorch/pytorch_model.bin +3 -0
- chinese_wwm_pytorch/vocab.txt +0 -0
- chinese_wwm_pytorch/vocab_ori.txt +0 -0
- config.py +82 -0
- config2.py +76 -0
- data/README.txt +2 -0
- data/WORDMAP.json +2367 -0
- data/ai_challenger_fsauor2018_testa_20180816.zip +3 -0
- data/ai_challenger_fsauor2018_trainingset_20180816.zip +3 -0
- data/ai_challenger_fsauor2018_validationset_20180816.zip +3 -0
- data/ai_challenger_sentiment_analysis_testa_20180816/README.txt +2 -0
- data/ai_challenger_sentiment_analysis_testa_20180816/protocol.txt +12 -0
- data/ai_challenger_sentiment_analysis_testa_20180816/sentiment_analysis_testa.csv +3 -0
- data/ai_challenger_sentiment_analysis_testa_20180816/sentiment_analysis_testa2.csv +3 -0
- data/ai_challenger_sentiment_analysis_trainingset_20180816/README.txt +3 -0
- data/ai_challenger_sentiment_analysis_trainingset_20180816/protocol.txt +12 -0
- data/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset.csv +3 -0
- data/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset_annotations.docx +0 -0
- data/ai_challenger_sentiment_analysis_validationset_20180816/README.txt +3 -0
- data/ai_challenger_sentiment_analysis_validationset_20180816/protocol.txt +12 -0
- data/ai_challenger_sentiment_analysis_validationset_20180816/sentiment_analysis_validationset.csv +3 -0
- data/ai_challenger_sentiment_analysis_validationset_20180816/sentiment_analysis_validationset_annotations.docx +0 -0
- data/ai_challenger_sentiment_analysis_validationset_20180816/~$ntiment_analysis_validationset_annotations.docx +0 -0
- data/protocol.txt +12 -0
- data/test/test_data.csv +3 -0
- data/test/test_data_old.csv +3 -0
- data/total.csv +3 -0
- data/train/train_data.csv +3 -0
- data/train/train_data.txt +0 -0
- data/train/train_data2.csv +3 -0
- data/train/train_data_old.csv +3 -0
- data/valid/val_data.csv +3 -0
- data/valid/val_data_old.csv +3 -0
- data/valid/专家评语.csv +3 -0
- data_gen.py +249 -0
- data_make.py +22 -0
- ddparser_test.py +8 -0
- demo.py +80 -0
- extract.py +31 -0
- gbk2utf.py +13 -0
- jieba_cut.py +71 -0
- log.txt +287 -0
- model2.py +298 -0
- models.py +873 -0
.gitattributes
CHANGED
@@ -1,6 +1,7 @@
 *.7z filter=lfs diff=lfs merge=lfs -text
 *.arrow filter=lfs diff=lfs merge=lfs -text
 *.bin filter=lfs diff=lfs merge=lfs -text
+*.csv filter=lfs diff=lfs merge=lfs -text
 *.bz2 filter=lfs diff=lfs merge=lfs -text
 *.ckpt filter=lfs diff=lfs merge=lfs -text
 *.ftz filter=lfs diff=lfs merge=lfs -text
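The one added line routes CSV files through Git LFS; it is exactly the entry that `git lfs track "*.csv"` appends, and it explains why each dataset CSV in the file list above shows up as a three-line pointer stub (+3 -0) rather than its full contents.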
TreeTest.py
ADDED
@@ -0,0 +1,13 @@
+from ddparser import DDParser
+# requires paddlepaddle 2.4.0
+import nltk
+# ddp = DDParser()
+from nltk.corpus import treebank
+# print(res)
+
+text=nltk.word_tokenize("论文从存货内部控制的五要素,看YT 超市存货内部控制的整体现状,发现其内部控制各关键环节存在的问题,并对原因进行分析,结合YT 超市的具体情况,依次分析存在问题环节的关键风险点、控制目标,并提出具体改进建议。论文从存货的分类角度去发现重点环节内部控制存在的问题。论文选题恰当,结构合理,论述清晰,写作规范,合乎逻辑。论文达到硕士毕业水平。")
+t = treebank.parsed_sents('wsj_0001.mrg')[0]
+t.draw()
+print(text)
+print(nltk.pos_tag(text))
+
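TreeTest.py assumes two NLTK resources are already installed; without them, nltk.word_tokenize and treebank.parsed_sents raise LookupError. A minimal one-time setup, assuming the default NLTK data path:

import nltk

nltk.download('punkt')     # tokenizer models behind nltk.word_tokenize
nltk.download('treebank')  # Penn Treebank sample that provides wsj_0001.mrg

Note also that punkt is not built for unsegmented Chinese, so word_tokenize will split the sample review mostly at punctuation rather than into words.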
analyze_data.py
ADDED
@@ -0,0 +1,23 @@
+import jieba
+import matplotlib.pyplot as plt
+from tqdm import tqdm
+
+from utils import parse_user_reviews
+
+
+def analyze(contents):
+    sent_lengths = []
+
+    for sentence in tqdm(contents):
+        seg_list = list(jieba.cut(sentence.strip()))
+        # Update word frequency
+        sent_lengths.append(len(seg_list))
+
+    num_bins = 100
+    n, bins, patches = plt.hist(sent_lengths, num_bins, facecolor='blue', alpha=0.5)
+    plt.show()
+
+
+if __name__ == '__main__':
+    user_reviews = parse_user_reviews('train')
+    analyze(user_reviews['content'])
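analyze() relies on utils.parse_user_reviews, whose source is not rendered in this view. Judging from the paths in config.py and the ['content'] access at the call site, a plausible stand-in, purely illustrative and not the repo's actual implementation, looks like:

import pandas as pd

def parse_user_reviews(split):
    # Hypothetical: load the requested split's CSV into a DataFrame
    # exposing a 'content' column of raw review strings.
    paths = {'train': 'data/train/train_data.csv',
             'valid': 'data/valid/val_data.csv',
             'test': 'data/test/test_data.csv'}
    return pd.read_csv(paths[split])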
bertviz_test.py
ADDED
@@ -0,0 +1,23 @@
+# coding: utf-8
+from transformers import AutoTokenizer, AutoModel, utils
+from bertviz import model_view, head_view
+
+
+utils.logging.set_verbosity_error()  # Suppress standard warnings
+model_name = './chinese_wwm_pytorch'
+input_text = '论文书写规范,逻辑合理,反映作者具有一定的分析问题、解决问题的能力'
+tokenizer = AutoTokenizer.from_pretrained('./chinese_wwm_pytorch')
+tokenizer.add_special_tokens({"additional_special_tokens": ["[unused1]", "[unused2]", "[unused3]"]})
+model = AutoModel.from_pretrained(model_name, output_attentions=True)  # Configure model to return attention values
+inputs = tokenizer.encode(input_text, return_tensors='pt')  # Tokenize input text
+print('inputs:', inputs)
+outputs = model(inputs)  # Run model
+attention = outputs[-1]  # Retrieve attention from model outputs
+tokens = tokenizer.convert_ids_to_tokens(inputs[0])  # Convert input ids to token strings
+print('tokens:', tokens)
+# print('attention:', attention)
+# model_view(attention, tokens)  # Display model view
+html_head_view = head_view(attention, tokens, html_action='return')
+
+with open("./bert.html", 'w') as file:
+    file.write(html_head_view.data)
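One portability caveat, an observation about the script rather than part of the commit: the tokens embedded in the rendered HTML are Chinese, so on platforms whose default file encoding is not UTF-8 the final write is safer with the encoding pinned:

with open('./bert.html', 'w', encoding='utf-8') as file:
    file.write(html_head_view.data)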
chinese_wwm_pytorch/bert_config.json
ADDED
@@ -0,0 +1,19 @@
+{
+  "attention_probs_dropout_prob": 0.1,
+  "directionality": "bidi",
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "max_position_embeddings": 512,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pooler_fc_size": 768,
+  "pooler_num_attention_heads": 12,
+  "pooler_num_fc_layers": 3,
+  "pooler_size_per_head": 128,
+  "pooler_type": "first_token_transform",
+  "type_vocab_size": 2,
+  "vocab_size": 21128
+}
chinese_wwm_pytorch/config.json
ADDED
@@ -0,0 +1,20 @@
+{
+  "attention_probs_dropout_prob": 0.1,
+  "directionality": "bidi",
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "max_position_embeddings": 512,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pooler_fc_size": 768,
+  "pooler_num_attention_heads": 12,
+  "pooler_num_fc_layers": 3,
+  "pooler_size_per_head": 128,
+  "pooler_type": "first_token_transform",
+  "type_vocab_size": 2,
+  "vocab_size": 21128,
+  "model_type": "bert"
+}
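config.json duplicates bert_config.json except for the trailing "model_type": "bert" entry; newer versions of transformers' Auto* classes need that key to resolve the architecture when loading from a local directory, which is presumably why both files are kept side by side.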
chinese_wwm_pytorch/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:394eb16febe46f087c2789b2f0a23179191fb6cbea4d02f673a9f65220dbf2eb
+size 411578458
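This is a Git LFS pointer stub, not the weights themselves: the actual ~412 MB pytorch_model.bin is stored in LFS, matching the *.bin rule in .gitattributes above.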
chinese_wwm_pytorch/vocab.txt
ADDED
The diff for this file is too large to render.
chinese_wwm_pytorch/vocab_ori.txt
ADDED
The diff for this file is too large to render.
config.py
ADDED
@@ -0,0 +1,82 @@
+import torch
+from transformers import BertTokenizer
+
+device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
+# device = torch.device("cpu")
+
+# Configure training/optimization
+learning_rate = 0.02  # 0.002 5e-5
+min_word_freq = 3
+print_every = 100
+chunk_size = 3  # 100; this is the batch size
+
+
+# num_labels = 20
+# num_classes = 4  # number of sentimental types
+
+# num_labels = 12
+# num_classes = 3  # number of sentimental types
+
+num_labels = 14
+num_classes = 3
+
+save_folder = 'models'
+
+# Configure models
+start_epoch = 0
+epochs = 150
+hidden_size = 768  # 800
+encoder_n_layers = 12  # 1
+dropout = 0.1
+batch_first = False
+
+filter_sizes = (5, 6, 7)
+# train_folder = 'data/ai_challenger_sentiment_analysis_trainingset_20180816'
+# valid_folder = 'data/ai_challenger_sentiment_analysis_validationset_20180816'
+# test_a_folder = 'data/ai_challenger_sentiment_analysis_testa_20180816'
+#
+# train_filename = 'sentiment_analysis_trainingset.csv'
+# valid_filename = 'sentiment_analysis_validationset.csv'
+# test_a_filename = 'sentiment_analysis_testa2.csv'
+'''Custom training data'''
+train_folder = 'data/train'
+valid_folder = 'data/valid'
+test_a_folder = 'data/test'
+
+train_filename = 'train_data.csv'
+valid_filename = 'val_data.csv'
+test_a_filename = 'test_data.csv'
+
+# label_names = ['location_traffic_convenience', 'location_distance_from_business_district', 'location_easy_to_find',
+#                'service_wait_time', 'service_waiters_attitude', 'service_parking_convenience', 'service_serving_speed',
+#                'price_level', 'price_cost_effective', 'price_discount', 'environment_decoration', 'environment_noise',
+#                'environment_space', 'environment_cleaness', 'dish_portion', 'dish_taste', 'dish_look',
+#                'dish_recommendation',
+#                'others_overall_experience', 'others_willing_to_consume_again']
+# label_names = ['1', '2', '3',
+#                '4', '5', '6', '7',
+#                '8', '9', '10', '11', '12']
+
+
+# label_names = ['location_traffic_convenience', 'location_distance_from_business_district', 'location_easy_to_find',
+#                'service_wait_time', 'service_waiters_attitude', 'service_parking_convenience', 'service_serving_speed',
+#                'price_level', 'price_cost_effective', 'price_discount', 'environment_decoration', 'environment_noise']
+
+# Aspect labels of the thesis-review data (title, topic value, literature review, workload, ...)
+label_names = ['标题', '选题角度与价值', '文献综述归纳总结情况', '论文工作量', '是否掌握基础(专业)知识', '是否具备科研能力', '格式规范', '行文表达', '逻辑性', '研究方法', '研究结论', '创新性及论文价值', '理论深度', '学术端正性']  # a b c d e f g h u j k l
+
+# assert len(label_names) == 12
+
+# Default word tokens
+PAD_token = 0  # Used for padding short sentences
+SOS_token = 1  # Start-of-sentence token
+EOS_token = 2  # End-of-sentence token
+UNK_token = 3
+
+train_path = 'data/train/train_data.csv'
+test_path = 'data/test/test_data.csv'
+dev_path = 'data/valid/val_data.csv'
+bert_path = './chinese_wwm_pytorch'
+pad_size = 500  # length each review is processed to (pad short, truncate long)
+sentence_pad_size = 100  # length each sentence is processed to (pad short, truncate long)
+tokenizer = BertTokenizer.from_pretrained(bert_path)
+hidden_size = 768
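A sketch of how the exported tokenizer and pad_size are typically consumed downstream (the actual pipeline presumably lives in data_gen.py, whose diff is not rendered above; this is illustrative only):

from config import tokenizer, pad_size

encoded = tokenizer('论文选题恰当,结构合理。',
                    padding='max_length',  # pad short reviews up to pad_size
                    truncation=True,       # truncate long reviews to pad_size
                    max_length=pad_size,
                    return_tensors='pt')
print(encoded['input_ids'].shape)  # torch.Size([1, 500])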
config2.py
ADDED
@@ -0,0 +1,76 @@
+import torch
+from transformers import BertTokenizer
+
+device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
+# device = torch.device("cpu")
+
+# Configure training/optimization
+learning_rate = 5e-5  # 0.002
+min_word_freq = 3
+print_every = 100
+chunk_size = 8  # 100; this is the batch size
+
+
+num_labels = 20
+num_classes = 4  # number of sentimental types
+
+# num_labels = 12
+# num_classes = 3  # number of sentimental types
+
+
+save_folder = 'models'
+
+# Configure models
+start_epoch = 0
+epochs = 150
+hidden_size = 768  # 800
+encoder_n_layers = 12  # 1
+dropout = 0.1
+batch_first = False
+
+filter_sizes = (2, 3, 4)
+train_folder = 'data/ai_challenger_sentiment_analysis_trainingset_20180816'
+valid_folder = 'data/ai_challenger_sentiment_analysis_validationset_20180816'
+test_a_folder = 'data/ai_challenger_sentiment_analysis_testa_20180816'
+
+train_filename = 'sentiment_analysis_trainingset.csv'
+valid_filename = 'sentiment_analysis_validationset.csv'
+test_a_filename = 'sentiment_analysis_testa2.csv'
+'''Custom training data'''
+# train_folder = 'data/train'
+# valid_folder = 'data/valid'
+# test_a_folder = 'data/test'
+#
+# train_filename = 'train_data.csv'
+# valid_filename = 'val_data.csv'
+# test_a_filename = 'test_data.csv'
+
+label_names = ['location_traffic_convenience', 'location_distance_from_business_district', 'location_easy_to_find',
+               'service_wait_time', 'service_waiters_attitude', 'service_parking_convenience', 'service_serving_speed',
+               'price_level', 'price_cost_effective', 'price_discount', 'environment_decoration', 'environment_noise',
+               'environment_space', 'environment_cleaness', 'dish_portion', 'dish_taste', 'dish_look',
+               'dish_recommendation',
+               'others_overall_experience', 'others_willing_to_consume_again']
+# label_names = ['1', '2', '3',
+#                '4', '5', '6', '7',
+#                '8', '9', '10', '11', '12']
+
+
+# label_names = ['location_traffic_convenience', 'location_distance_from_business_district', 'location_easy_to_find',
+#                'service_wait_time', 'service_waiters_attitude', 'service_parking_convenience', 'service_serving_speed',
+#                'price_level', 'price_cost_effective', 'price_discount', 'environment_decoration', 'environment_noise']
+# assert len(label_names) == 12
+
+# Default word tokens
+PAD_token = 0  # Used for padding short sentences
+SOS_token = 1  # Start-of-sentence token
+EOS_token = 2  # End-of-sentence token
+UNK_token = 3
+
+train_path = 'data/train/train_data.csv'
+test_path = 'data/test/test_data.csv'
+dev_path = 'data/valid/val_data.csv'
+bert_path = './chinese_wwm_pytorch'
+pad_size = 500  # length each sentence is processed to (pad short, truncate long)
+tokenizer = BertTokenizer.from_pretrained(bert_path)
+hidden_size = 768
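config2.py mirrors config.py but flips the dataset selection: the AI Challenger 20-aspect / 4-class restaurant-review setup is active and the custom 14-aspect thesis-review setup is commented out, with correspondingly smaller filter sizes (2, 3, 4), a larger batch (chunk_size = 8), and the conventional 5e-5 BERT fine-tuning learning rate.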
data/README.txt
ADDED
@@ -0,0 +1,2 @@
+sentiment_analysis_testa.csv is the test set A data file, containing 15,000 review records in total
+protocol.txt is the dataset download agreement
data/WORDMAP.json
ADDED
|
@@ -0,0 +1,2367 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"\u8bba\u6587": 4,
|
| 3 |
+
"\u4ece": 5,
|
| 4 |
+
"\u5b58\u8d27": 6,
|
| 5 |
+
"\u5185\u90e8": 7,
|
| 6 |
+
"\u63a7\u5236": 8,
|
| 7 |
+
"\u7684": 9,
|
| 8 |
+
"\u4e94": 10,
|
| 9 |
+
"\uff0c": 11,
|
| 10 |
+
"\u770b": 12,
|
| 11 |
+
" ": 13,
|
| 12 |
+
"\u8d85\u5e02": 14,
|
| 13 |
+
"\u6574\u4f53": 15,
|
| 14 |
+
"\u73b0\u72b6": 16,
|
| 15 |
+
"\u53d1\u73b0": 17,
|
| 16 |
+
"\u5176": 18,
|
| 17 |
+
"\u5404": 19,
|
| 18 |
+
"\u5b58\u5728": 20,
|
| 19 |
+
"\u95ee\u9898": 21,
|
| 20 |
+
"\u5e76": 22,
|
| 21 |
+
"\u5bf9": 23,
|
| 22 |
+
"\u539f\u56e0": 24,
|
| 23 |
+
"\u8fdb\u884c": 25,
|
| 24 |
+
"\u5206\u6790": 26,
|
| 25 |
+
"\u7ed3\u5408": 27,
|
| 26 |
+
"\u73af\u8282": 28,
|
| 27 |
+
"\u5173\u952e": 29,
|
| 28 |
+
"\u98ce\u9669": 30,
|
| 29 |
+
"\u70b9": 31,
|
| 30 |
+
"\u3001": 32,
|
| 31 |
+
"\u63d0\u51fa": 33,
|
| 32 |
+
"\u5177\u4f53": 34,
|
| 33 |
+
"\u6539\u8fdb": 35,
|
| 34 |
+
"\u5efa\u8bae": 36,
|
| 35 |
+
"\u3002": 37,
|
| 36 |
+
"\n": 38,
|
| 37 |
+
"\u5206\u7c7b": 39,
|
| 38 |
+
"\u89d2\u5ea6": 40,
|
| 39 |
+
"\u53bb": 41,
|
| 40 |
+
"\u91cd\u70b9": 42,
|
| 41 |
+
"\u9009\u9898": 43,
|
| 42 |
+
"\u6070\u5f53": 44,
|
| 43 |
+
"\u7ed3\u6784\u5408\u7406": 45,
|
| 44 |
+
"\u8bba\u8ff0": 46,
|
| 45 |
+
"\u6e05\u6670": 47,
|
| 46 |
+
"\u5199\u4f5c": 48,
|
| 47 |
+
"\u89c4\u8303": 49,
|
| 48 |
+
"\u8fbe\u5230": 50,
|
| 49 |
+
"\u7855\u58eb": 51,
|
| 50 |
+
"\u6bd5\u4e1a": 52,
|
| 51 |
+
"\u6c34\u5e73": 53,
|
| 52 |
+
"\u603b\u7ed3": 54,
|
| 53 |
+
"\u4e86": 55,
|
| 54 |
+
"*": 56,
|
| 55 |
+
"ST": 57,
|
| 56 |
+
"\u7c7b": 58,
|
| 57 |
+
"\u4f01\u4e1a": 59,
|
| 58 |
+
"\u60c5\u51b5": 60,
|
| 59 |
+
"\u5168\u9762": 61,
|
| 60 |
+
"\u68b3\u7406": 62,
|
| 61 |
+
"\u88ab": 63,
|
| 62 |
+
"\u5728": 64,
|
| 63 |
+
"\u8fc7\u7a0b": 65,
|
| 64 |
+
"\u4e2d": 66,
|
| 65 |
+
"\u9010\u6e10": 67,
|
| 66 |
+
"\uff1b": 68,
|
| 67 |
+
"\u91c7\u53d6": 69,
|
| 68 |
+
"\u591a\u79cd": 70,
|
| 69 |
+
"\u8d22\u52a1": 71,
|
| 70 |
+
"\u624b\u6bb5": 72,
|
| 71 |
+
"\u4ecd": 73,
|
| 72 |
+
"\u672a": 74,
|
| 73 |
+
"\u4f7f": 75,
|
| 74 |
+
"\u540e": 76,
|
| 75 |
+
"\u91cd\u6574": 77,
|
| 76 |
+
"\u91c7\u7528": 78,
|
| 77 |
+
"\u4ee5": 79,
|
| 78 |
+
"\u5e02\u573a": 80,
|
| 79 |
+
"\u4e3a": 81,
|
| 80 |
+
"\u884c\u4e1a": 82,
|
| 81 |
+
"\u4ef7\u503c": 83,
|
| 82 |
+
"\u8d44\u4ea7": 84,
|
| 83 |
+
"\u7f6e": 85,
|
| 84 |
+
"\u51fa": 86,
|
| 85 |
+
"\u548c": 87,
|
| 86 |
+
"\u4ee5\u53ca": 88,
|
| 87 |
+
"\u503a\u8f6c\u80a1": 89,
|
| 88 |
+
"\u7b49": 90,
|
| 89 |
+
"\u6210\u529f": 91,
|
| 90 |
+
"\u6700\u540e": 92,
|
| 91 |
+
"\u5229\u76ca": 93,
|
| 92 |
+
"\u53ca": 94,
|
| 93 |
+
"\u6548\u5e94": 95,
|
| 94 |
+
"\u5f71\u54cd": 96,
|
| 95 |
+
"\u51fa\u53d1": 97,
|
| 96 |
+
"\u8bc4\u4ef7": 98,
|
| 97 |
+
"\u7ecf\u6d4e": 99,
|
| 98 |
+
"\u8fd0\u7528": 100,
|
| 99 |
+
"\u4e8b\u4ef6": 101,
|
| 100 |
+
"\u7814\u7a76\u6cd5": 102,
|
| 101 |
+
"\u524d\u540e": 103,
|
| 102 |
+
"\u53cd\u5e94": 104,
|
| 103 |
+
"\u8fd9": 105,
|
| 104 |
+
"\u4e00\u7cfb\u5217": 106,
|
| 105 |
+
"\u884c\u4e3a": 107,
|
| 106 |
+
"\u90fd": 108,
|
| 107 |
+
"\u901a\u8fc7": 109,
|
| 108 |
+
"\u7814\u7a76": 110,
|
| 109 |
+
"\u53ef\u4ee5": 111,
|
| 110 |
+
"\u8d44\u672c": 112,
|
| 111 |
+
"\u63d0\u4f9b": 113,
|
| 112 |
+
"\u501f\u9274": 114,
|
| 113 |
+
"\u7ed3\u6784": 115,
|
| 114 |
+
"\u8f83": 116,
|
| 115 |
+
"\u5408\u7406": 117,
|
| 116 |
+
"\u903b\u8f91": 118,
|
| 117 |
+
"\u4e25\u5bc6": 119,
|
| 118 |
+
"\u5e7f\u6cdb": 120,
|
| 119 |
+
"\u9605\u8bfb": 121,
|
| 120 |
+
"\u76f8\u5173": 122,
|
| 121 |
+
"\u6587\u732e": 123,
|
| 122 |
+
"\u5de5\u4f5c\u91cf": 124,
|
| 123 |
+
"\u9971\u6ee1": 125,
|
| 124 |
+
"\u7b26\u5408": 126,
|
| 125 |
+
"\u7855\u58eb\u8bba\u6587": 127,
|
| 126 |
+
"\u8981\u6c42": 128,
|
| 127 |
+
"\u8be5": 129,
|
| 128 |
+
"\u4e3b\u8981": 130,
|
| 129 |
+
"\u5de5\u4f5c": 131,
|
| 130 |
+
"\u5982\u4e0b": 132,
|
| 131 |
+
"\uff1a": 133,
|
| 132 |
+
"1": 134,
|
| 133 |
+
"\u9488\u5bf9": 135,
|
| 134 |
+
"\u578b": 136,
|
| 135 |
+
"\u7f51": 137,
|
| 136 |
+
"\u9006\u53d8\u5668": 138,
|
| 137 |
+
"\u8c10\u632f": 139,
|
| 138 |
+
"\u5904": 140,
|
| 139 |
+
"\u7535\u5bb9": 141,
|
| 140 |
+
"\u7535\u6d41": 142,
|
| 141 |
+
"\u53cd\u9988": 143,
|
| 142 |
+
"\u4e0e": 144,
|
| 143 |
+
"\u53cc": 145,
|
| 144 |
+
"\u6709\u6e90": 146,
|
| 145 |
+
"\u963b\u5c3c": 147,
|
| 146 |
+
"\u6cd5": 148,
|
| 147 |
+
"\u6291\u5236": 149,
|
| 148 |
+
"\u524d": 150,
|
| 149 |
+
"\u7ea7": 151,
|
| 150 |
+
"\u7535\u538b": 152,
|
| 151 |
+
"\u6ce2\u52a8": 153,
|
| 152 |
+
"\u52a0\u5165": 154,
|
| 153 |
+
"PI": 155,
|
| 154 |
+
"\u57fa\u4e8e": 156,
|
| 155 |
+
"\u4ee3\u8868": 157,
|
| 156 |
+
"\u4f20\u7edf": 158,
|
| 157 |
+
"\u63a7\u5236\u7b56\u7565": 159,
|
| 158 |
+
"2": 160,
|
| 159 |
+
"\u7cfb\u7edf": 161,
|
| 160 |
+
"\u975e\u7ebf\u6027": 162,
|
| 161 |
+
"\u81ea\u6297\u6270": 163,
|
| 162 |
+
"\u7b97\u6cd5": 164,
|
| 163 |
+
"\u5f85": 165,
|
| 164 |
+
"\u53c2\u6570": 166,
|
| 165 |
+
"\u591a": 167,
|
| 166 |
+
"\u7ebf\u6027": 168,
|
| 167 |
+
"\u63a7\u5236\u7b97\u6cd5": 169,
|
| 168 |
+
"\uff08": 170,
|
| 169 |
+
"\uff09": 171,
|
| 170 |
+
"\u63a8\u5bfc": 172,
|
| 171 |
+
"3": 173,
|
| 172 |
+
"\u5c06": 174,
|
| 173 |
+
"\u81ea": 175,
|
| 174 |
+
"\u63a7\u5236\u6280\u672f": 176,
|
| 175 |
+
"\u8bbe\u8ba1": 177,
|
| 176 |
+
"\u63a7\u5236\u5668": 178,
|
| 177 |
+
"\u6765\u6e90\u4e8e": 179,
|
| 178 |
+
"\u5b9e\u8df5": 180,
|
| 179 |
+
"\u6709": 181,
|
| 180 |
+
"\u4e00\u5b9a": 182,
|
| 181 |
+
"\u5b9e\u9645": 183,
|
| 182 |
+
"\u4f7f\u7528": 184,
|
| 183 |
+
"\u5f52\u7eb3": 185,
|
| 184 |
+
"\u6280\u672f": 186,
|
| 185 |
+
"\u65b9\u9762": 187,
|
| 186 |
+
"\u5177\u6709": 188,
|
| 187 |
+
"\u65b0\u9896\u6027": 189,
|
| 188 |
+
"\u6240": 190,
|
| 189 |
+
"\u65b9\u6cd5": 191,
|
| 190 |
+
"\u5b9e\u7528\u6027": 192,
|
| 191 |
+
"\u56fe\u8868": 193,
|
| 192 |
+
"\u5c42\u6b21\u5206\u660e": 194,
|
| 193 |
+
"\u6587\u5b57": 195,
|
| 194 |
+
"\u901a\u987a": 196,
|
| 195 |
+
"\u8bf4\u660e": 197,
|
| 196 |
+
"\u8be5\u751f": 198,
|
| 197 |
+
"\u624e\u5b9e": 199,
|
| 198 |
+
"\u7406\u8bba": 200,
|
| 199 |
+
"\u57fa\u7840": 201,
|
| 200 |
+
"\u5f3a": 202,
|
| 201 |
+
"\u79d1\u7814": 203,
|
| 202 |
+
"\u80fd\u529b": 204,
|
| 203 |
+
"\u4e13\u4e1a": 205,
|
| 204 |
+
"\u6c34\u51c6": 206,
|
| 205 |
+
"\u540c\u610f": 207,
|
| 206 |
+
"\u7b54\u8fa9": 208,
|
| 207 |
+
"\u6211\u56fd": 209,
|
| 208 |
+
"\u7535\u7f51": 210,
|
| 209 |
+
"\u5206\u5e03": 211,
|
| 210 |
+
"\u8f93\u7535": 212,
|
| 211 |
+
"\u7ebf\u8def": 213,
|
| 212 |
+
"\u5bb9\u6613": 214,
|
| 213 |
+
"\u906d\u53d7": 215,
|
| 214 |
+
"\u5371\u5bb3": 216,
|
| 215 |
+
"\u914d\u7535": 217,
|
| 216 |
+
"\u4f5c\u7528": 218,
|
| 217 |
+
"\u7531\u4e8e": 219,
|
| 218 |
+
"\u9020\u6210": 220,
|
| 219 |
+
"\u4e8b\u6545": 221,
|
| 220 |
+
"\u9ad8": 222,
|
| 221 |
+
"\u5de6\u53f3": 223,
|
| 222 |
+
"\u4e25\u91cd": 224,
|
| 223 |
+
"\u4f9b\u7535": 225,
|
| 224 |
+
"\u53ef\u9760\u6027": 226,
|
| 225 |
+
"\u6240\u4ee5": 227,
|
| 226 |
+
"\u91cd\u8981": 228,
|
| 227 |
+
"\u610f\u4e49": 229,
|
| 228 |
+
"\u5efa\u7acb": 230,
|
| 229 |
+
"\u6570\u5b66\u6a21\u578b": 231,
|
| 230 |
+
"\u4e24": 232,
|
| 231 |
+
"\u5f97\u51fa": 233,
|
| 232 |
+
"\u63aa\u65bd": 234,
|
| 233 |
+
"\u73b0\u5b9e\u610f\u4e49": 235,
|
| 234 |
+
"\u7814\u7a76\u6210\u679c": 236,
|
| 235 |
+
"\u5e94\u7528": 237,
|
| 236 |
+
"\u7ed3\u679c\u8868\u660e": 238,
|
| 237 |
+
"\u4f5c\u8005": 239,
|
| 238 |
+
"\u638c\u63e1": 240,
|
| 239 |
+
"\u4e13\u4e1a\u77e5\u8bc6": 241,
|
| 240 |
+
"\u4ece\u4e8b": 242,
|
| 241 |
+
"\u79d1\u7814\u5de5\u4f5c": 243,
|
| 242 |
+
"\u7b26\u5408\u89c4\u8303": 244,
|
| 243 |
+
"\u8bed\u53e5": 245,
|
| 244 |
+
"\u57fa\u672c": 246,
|
| 245 |
+
"\u8ba4\u771f": 247,
|
| 246 |
+
"\u4fee\u6539": 248,
|
| 247 |
+
"\u53c2\u52a0": 249,
|
| 248 |
+
"\u914d\u7535\u7f51": 250,
|
| 249 |
+
"\u968f\u7740": 251,
|
| 250 |
+
"\u8d1f\u8377": 252,
|
| 251 |
+
"\u4e0d\u65ad": 253,
|
| 252 |
+
"\u589e\u5927": 254,
|
| 253 |
+
"\u5f00\u5173\u67dc": 255,
|
| 254 |
+
"\u8bbe\u5907": 256,
|
| 255 |
+
"\u6570\u91cf": 257,
|
| 256 |
+
"\u589e\u52a0": 258,
|
| 257 |
+
"\u662f": 259,
|
| 258 |
+
"\u7edd\u7f18": 260,
|
| 259 |
+
"\u56e0\u6b64": 261,
|
| 260 |
+
"\u8d85\u58f0\u6ce2": 262,
|
| 261 |
+
"\u5730": 263,
|
| 262 |
+
"\u68c0\u6d4b": 264,
|
| 263 |
+
"\u5c40\u90e8": 265,
|
| 264 |
+
"\u653e\u7535": 266,
|
| 265 |
+
"\u539f\u7406": 267,
|
| 266 |
+
"\u53ca\u5176": 268,
|
| 267 |
+
"\u5b9e\u73b0": 269,
|
| 268 |
+
"\u4ecb\u7ecd": 270,
|
| 269 |
+
"\u5982\u4f55": 271,
|
| 270 |
+
"\u5229\u7528": 272,
|
| 271 |
+
"\u67d0": 273,
|
| 272 |
+
"\u4e8c\u7ef4": 274,
|
| 273 |
+
"\u6709\u9650\u5143": 275,
|
| 274 |
+
"\u7535\u573a": 276,
|
| 275 |
+
"\u6a21\u578b": 277,
|
| 276 |
+
"\u627e\u51fa": 278,
|
| 277 |
+
"\u89c2\u6d4b": 279,
|
| 278 |
+
"\u51fa\u73b0": 280,
|
| 279 |
+
"\u8fc7": 281,
|
| 280 |
+
"\u6848\u4f8b": 282,
|
| 281 |
+
"\u5224\u65ad": 283,
|
| 282 |
+
"\u6545\u969c": 284,
|
| 283 |
+
"\u7c7b\u578b": 285,
|
| 284 |
+
"\u8d77\u5230": 286,
|
| 285 |
+
"\u53cd\u6620": 287,
|
| 286 |
+
"\u7814\u7a76\u751f": 288,
|
| 287 |
+
"\u57fa\u7840\u77e5\u8bc6": 289,
|
| 288 |
+
"\u690d\u4fdd": 290,
|
| 289 |
+
"\u673a\u68b0": 291,
|
| 290 |
+
"\u55b7\u836f": 292,
|
| 291 |
+
"\u6548\u7387": 293,
|
| 292 |
+
"\u4f4e": 294,
|
| 293 |
+
"\u6548\u679c": 295,
|
| 294 |
+
"\u5dee": 296,
|
| 295 |
+
"\u673a": 297,
|
| 296 |
+
"\u836f\u6db2": 298,
|
| 297 |
+
"\u6d41\u91cf": 299,
|
| 298 |
+
"\u76d1\u63a7": 300,
|
| 299 |
+
"\u7cbe\u51c6": 301,
|
| 300 |
+
"\u63a7\u5236\u7cfb\u7edf": 302,
|
| 301 |
+
"\u9700\u6c42": 303,
|
| 302 |
+
"\u65b9\u6848": 304,
|
| 303 |
+
"\u9009\u7528": 305,
|
| 304 |
+
"-": 306,
|
| 305 |
+
"\u4f5c\u4e3a": 307,
|
| 306 |
+
"\u5d4c\u5165\u5f0f": 308,
|
| 307 |
+
"\u8f6f\u4ef6": 309,
|
| 308 |
+
"\u5f00\u53d1": 310,
|
| 309 |
+
"\u73af\u5883": 311,
|
| 310 |
+
"\u5b8c\u6210": 312,
|
| 311 |
+
"\u901a\u4fe1": 313,
|
| 312 |
+
"\u6a21\u7cca": 314,
|
| 313 |
+
"PID": 315,
|
| 314 |
+
"\u529f\u80fd\u6a21\u5757": 316,
|
| 315 |
+
"\u7f16\u5199": 317,
|
| 316 |
+
"\u4e0b": 318,
|
| 317 |
+
"\u6d4b\u8bd5": 319,
|
| 318 |
+
"\u529f\u80fd": 320,
|
| 319 |
+
"\u6b63\u5e38": 321,
|
| 320 |
+
"\u4e0d\u540c": 322,
|
| 321 |
+
"\u5de5\u51b5": 323,
|
| 322 |
+
"\u6027\u80fd": 324,
|
| 323 |
+
"\u597d": 325,
|
| 324 |
+
"\u6761\u7406": 326,
|
| 325 |
+
"\u6bd4\u8f83\u6e05\u695a": 327,
|
| 326 |
+
"\u6b63\u786e": 328,
|
| 327 |
+
"\u53d6\u5f97": 329,
|
| 328 |
+
",": 330,
|
| 329 |
+
"\u8868\u660e": 331,
|
| 330 |
+
"\u57fa\u7840\u7406\u8bba": 332,
|
| 331 |
+
"\u7f16\u7a0b": 333,
|
| 332 |
+
"\u8f83\u5f3a": 334,
|
| 333 |
+
"\u8bba\u6587\u7b54\u8fa9": 335,
|
| 334 |
+
"\u9009\u62e9": 336,
|
| 335 |
+
"\u6c22": 337,
|
| 336 |
+
"\u975e\u6676": 338,
|
| 337 |
+
"\u5408\u91d1": 339,
|
| 338 |
+
"\u529b\u5b66\u6027\u80fd": 340,
|
| 339 |
+
"\u5bf9\u8c61": 341,
|
| 340 |
+
"\u6df1\u5165": 342,
|
| 341 |
+
"\u4e86\u89e3": 343,
|
| 342 |
+
"\u5fae\u89c2": 344,
|
| 343 |
+
"Zr": 345,
|
| 344 |
+
"\u57fa": 346,
|
| 345 |
+
"\u5757\u4f53": 347,
|
| 346 |
+
"\u7b49\u79bb\u5b50": 348,
|
| 347 |
+
"\u7535\u5f27": 349,
|
| 348 |
+
"\u5236\u5907": 350,
|
| 349 |
+
"\u542b\u91cf": 351,
|
| 350 |
+
"\u4e09\u79cd": 352,
|
| 351 |
+
"\u5178\u578b": 353,
|
| 352 |
+
"\u7eb3\u7c73": 354,
|
| 353 |
+
"\u53d8\u5f62": 355,
|
| 354 |
+
"\u5ba4\u6e29": 356,
|
| 355 |
+
"\u9ad8\u6e29": 357,
|
| 356 |
+
"\u538b\u7f29": 358,
|
| 357 |
+
"\u89c4\u5f8b": 359,
|
| 358 |
+
"\u4ea7\u751f": 360,
|
| 359 |
+
"\u5f97\u5230": 361,
|
| 360 |
+
"\u4e00\u4e9b": 362,
|
| 361 |
+
"\u7ed3\u8bba": 363,
|
| 362 |
+
"\u672c\u5b66\u79d1": 364,
|
| 363 |
+
"\u51c6\u786e": 365,
|
| 364 |
+
"\u9002\u5f53": 366,
|
| 365 |
+
"\u8868\u5f81": 367,
|
| 366 |
+
"\u6dfb\u52a0": 368,
|
| 367 |
+
"\u8868\u73b0": 369,
|
| 368 |
+
"\u6db2\u76f8": 370,
|
| 369 |
+
"\u533a": 371,
|
| 370 |
+
"\u5bbd\u5ea6": 372,
|
| 371 |
+
"\u51e0\u4e2a": 373,
|
| 372 |
+
"\u6027\u8d28": 374,
|
| 373 |
+
"\u521b\u65b0\u6027": 375,
|
| 374 |
+
"\u6d41\u7545": 376,
|
| 375 |
+
"\u8868\u8ff0": 377,
|
| 376 |
+
"\u4e25\u8c28": 378,
|
| 377 |
+
"\u672c": 379,
|
| 378 |
+
"Mg": 380,
|
| 379 |
+
"(": 381,
|
| 380 |
+
"+": 382,
|
| 381 |
+
"0.5": 383,
|
| 382 |
+
"x": 384,
|
| 383 |
+
")": 385,
|
| 384 |
+
"Y": 386,
|
| 385 |
+
"\u7cfb": 387,
|
| 386 |
+
"\u70ed\u88c2": 388,
|
| 387 |
+
"\u654f\u611f\u6027": 389,
|
| 388 |
+
"\u9884\u6d4b": 390,
|
| 389 |
+
"\u6536\u96c6": 391,
|
| 390 |
+
"\u4e24\u79cd": 392,
|
| 391 |
+
"\u51dd\u56fa": 393,
|
| 392 |
+
"\u5e76\u4e14": 394,
|
| 393 |
+
"\u8def\u5f84": 395,
|
| 394 |
+
"\u56fa\u76f8": 396,
|
| 395 |
+
"\u5206\u6570": 397,
|
| 396 |
+
"\u6e29\u5ea6": 398,
|
| 397 |
+
"\u533a\u57df": 399,
|
| 398 |
+
"\u8303\u56f4": 400,
|
| 399 |
+
"\u7279\u5f81": 401,
|
| 400 |
+
"\u540c\u65f6": 402,
|
| 401 |
+
"\u76f8": 403,
|
| 402 |
+
"\u56e0\u7d20": 404,
|
| 403 |
+
"\u5e94\u529b": 405,
|
| 404 |
+
"\u8f83\u4e3a": 406,
|
| 405 |
+
"\u903b\u8f91\u6027": 407,
|
| 406 |
+
"\u80fd\u591f": 408,
|
| 407 |
+
"\u89e3\u51b3\u95ee\u9898": 409,
|
| 408 |
+
"\u4e00\u7bc7": 410,
|
| 409 |
+
"\u4f18\u79c0": 411,
|
| 410 |
+
"\u7855\u58eb\u5b66\u4f4d": 412,
|
| 411 |
+
"\u94f6\u884c": 413,
|
| 412 |
+
"\u5f53\u524d": 414,
|
| 413 |
+
"\u4e00\u4e2a": 415,
|
| 414 |
+
"\u6bd4\u8f83": 416,
|
| 415 |
+
"\u53d1\u5c55": 417,
|
| 416 |
+
"\u5065\u5eb7": 418,
|
| 417 |
+
"\u5b9e\u8bc1": 419,
|
| 418 |
+
"\u9700\u8981": 420,
|
| 419 |
+
"\u6587\u7ae0": 421,
|
| 420 |
+
"\u4e5f": 422,
|
| 421 |
+
"\u7a0b\u5ea6": 423,
|
| 422 |
+
"\u4e0a": 424,
|
| 423 |
+
"\u4f53\u73b0": 425,
|
| 424 |
+
"\u5bf9\u672c": 426,
|
| 425 |
+
"\u9886\u57df": 427,
|
| 426 |
+
"\u5b66\u79d1": 428,
|
| 427 |
+
"\u57fa\u672c\u77e5\u8bc6": 429,
|
| 428 |
+
"\u73b0\u5b9e": 430,
|
| 429 |
+
"\u6570\u636e": 431,
|
| 430 |
+
"\u5904\u7406": 432,
|
| 431 |
+
"\u4f46\u662f": 433,
|
| 432 |
+
"\u89c4\u8303\u6027": 434,
|
| 433 |
+
"\u5b66\u672f": 435,
|
| 434 |
+
"\u4e25\u8c28\u6027": 436,
|
| 435 |
+
"\u8fd8": 437,
|
| 436 |
+
"\u8fdb\u4e00\u6b65": 438,
|
| 437 |
+
"\u8be6\u7ec6": 439,
|
| 438 |
+
"\u610f\u89c1": 440,
|
| 439 |
+
"\u4e0d\u8db3": 441,
|
| 440 |
+
"\u65b0": 442,
|
| 441 |
+
"\u300a": 443,
|
| 442 |
+
"\u300b": 444,
|
| 443 |
+
"\u56fd\u5bb6": 445,
|
| 444 |
+
"\u5c42\u9762": 446,
|
| 445 |
+
"\u8d8a\u6765\u8d8a": 447,
|
| 446 |
+
"\u91cd\u89c6": 448,
|
| 447 |
+
"\u7ee9\u6548\u8bc4\u4ef7": 449,
|
| 448 |
+
"\u591a\u4e2a": 450,
|
| 449 |
+
"\u5173\u4e8e": 451,
|
| 450 |
+
"\u73af\u5883\u4fdd\u62a4": 452,
|
| 451 |
+
"\u5f00\u59cb": 453,
|
| 452 |
+
"\u666e\u904d": 454,
|
| 453 |
+
"\u5173\u6ce8": 455,
|
| 454 |
+
"\u4e3a\u4e86": 456,
|
| 455 |
+
"\u63d0\u9ad8": 457,
|
| 456 |
+
"\u8d28\u91cf": 458,
|
| 457 |
+
"\u5927\u91cf": 459,
|
| 458 |
+
"\u4e13\u9879\u8d44\u91d1": 460,
|
| 459 |
+
"\u5236\u5b9a": 461,
|
| 460 |
+
"\u4e00\u5957": 462,
|
| 461 |
+
"\u6709\u6548": 463,
|
| 462 |
+
"\u663e\u5f97": 464,
|
| 463 |
+
"\u201c": 465,
|
| 464 |
+
"MD": 466,
|
| 465 |
+
"\u68ee\u5de5": 467,
|
| 466 |
+
"\u516c\u53f8": 468,
|
| 467 |
+
"\u4fdd\u62a4": 469,
|
| 468 |
+
"\u5de5\u7a0b": 470,
|
| 469 |
+
"\u201d": 471,
|
| 470 |
+
"\u8c03\u67e5": 472,
|
| 471 |
+
"\u56fd\u5185\u5916": 473,
|
| 472 |
+
"\u6982\u5ff5": 474,
|
| 473 |
+
"\u754c\u5b9a": 475,
|
| 474 |
+
"\u6307\u6807\u4f53\u7cfb": 476,
|
| 475 |
+
"\u4f53\u7cfb": 477,
|
| 476 |
+
"\u4fdd\u969c": 478,
|
| 477 |
+
"\u575a\u5b9e": 479,
|
| 478 |
+
"\u5b66\u79d1\u77e5\u8bc6": 480,
|
| 479 |
+
"\u5177\u5907": 481,
|
| 480 |
+
"\u72ec\u7acb": 482,
|
| 481 |
+
"\u79d1\u5b66\u7814\u7a76": 483,
|
| 482 |
+
"\u673a\u6784": 484,
|
| 483 |
+
"\u6218\u7565": 485,
|
| 484 |
+
"\u4e00\u6587": 486,
|
| 485 |
+
"\u4e4b\u95f4": 487,
|
| 486 |
+
"\u5173\u7cfb": 488,
|
| 487 |
+
"2017": 489,
|
| 488 |
+
"\u5e74": 490,
|
| 489 |
+
"\u9009\u53d6": 491,
|
| 490 |
+
"\u7535\u5b50": 492,
|
| 491 |
+
"\u4e09\u4e2a": 493,
|
| 492 |
+
"\u5bf9\u4e8e": 494,
|
| 493 |
+
"\u663e\u793a": 495,
|
| 494 |
+
"\u8bba\u8bc1": 496,
|
| 495 |
+
"\u6c38\u78c1": 497,
|
| 496 |
+
"\u540c\u6b65\u7535\u673a": 498,
|
| 497 |
+
"\u975e\u5e38": 499,
|
| 498 |
+
"\u672c\u6587": 500,
|
| 499 |
+
"\u516d\u76f8": 501,
|
| 500 |
+
"PMSM": 502,
|
| 501 |
+
"\u8f6c\u6362": 503,
|
| 502 |
+
"\u8026": 504,
|
| 503 |
+
"\u5750\u6807\u7cfb": 505,
|
| 504 |
+
"\u78c1\u573a": 506,
|
| 505 |
+
"\u5b9a\u5411": 507,
|
| 506 |
+
"MATLAB": 508,
|
| 507 |
+
"/": 509,
|
| 508 |
+
"Simulink": 510,
|
| 509 |
+
"\u642d\u5efa": 511,
|
| 510 |
+
"\u4eff\u771f": 512,
|
| 511 |
+
"\u9a8c\u8bc1": 513,
|
| 512 |
+
"\u5176\u6b21": 514,
|
| 513 |
+
"\u5b9a\u5b50": 515,
|
| 514 |
+
"\u7ed5\u7ec4": 516,
|
| 515 |
+
"\u65f6": 517,
|
| 516 |
+
"\u975e": 518,
|
| 517 |
+
"\u6b63\u4ea4": 519,
|
| 518 |
+
"\u79cd": 520,
|
| 519 |
+
"\u5207\u6362": 521,
|
| 520 |
+
"\u5230": 522,
|
| 521 |
+
"\u8865\u507f": 523,
|
| 522 |
+
"\u5b9e\u9a8c": 524,
|
| 523 |
+
"\u518d\u6b21": 525,
|
| 524 |
+
"\u6ed1": 526,
|
| 525 |
+
"\u5f15\u5165": 527,
|
| 526 |
+
"\u65b0\u578b": 528,
|
| 527 |
+
"\u6ed1\u6a21": 529,
|
| 528 |
+
"\u901f\u5ea6": 530,
|
| 529 |
+
"\u9a71\u52a8": 531,
|
| 530 |
+
"\u6a21\u5757": 532,
|
| 531 |
+
"PWM": 533,
|
| 532 |
+
"\u4f5c\u51fa": 534,
|
| 533 |
+
"\u8868\u8fbe": 535,
|
| 534 |
+
"\u6846\u67b6": 536,
|
| 535 |
+
"\u4f46": 537,
|
| 536 |
+
"\u4e0d": 538,
|
| 537 |
+
"\u7a81\u51fa": 539,
|
| 538 |
+
"\u5c0f": 540,
|
| 539 |
+
"\u6ce2\u5305": 541,
|
| 540 |
+
"\u53d8\u6362": 542,
|
| 541 |
+
"\u9707\u52a8": 543,
|
| 542 |
+
"\u7279\u6027": 544,
|
| 543 |
+
"\u7a0b": 545,
|
| 544 |
+
"\u7a7a\u95f4": 546,
|
| 545 |
+
"\u76f8\u5173\u6027": 547,
|
| 546 |
+
"\u6a21\u62df": 548,
|
| 547 |
+
"\u573a": 549,
|
| 548 |
+
"\u5c55\u5f00": 550,
|
| 549 |
+
"\u7406\u8bba\u610f\u4e49": 551,
|
| 550 |
+
"\u65f6\u53d8": 552,
|
| 551 |
+
"\u529f\u7387": 553,
|
| 552 |
+
"\u5b9e\u6d4b": 554,
|
| 553 |
+
"\u8bb0\u5f55": 555,
|
| 554 |
+
"\u5e73\u53f0": 556,
|
| 555 |
+
"\u6027": 557,
|
| 556 |
+
"\u91cd\u6784": 558,
|
| 557 |
+
"\u4e00\u79cd": 559,
|
| 558 |
+
"\u76ee\u6807": 560,
|
| 559 |
+
"\u52a0\u901f\u5ea6": 561,
|
| 560 |
+
"\u7b97\u4f8b": 562,
|
| 561 |
+
"\u6b63\u786e\u6027": 563,
|
| 562 |
+
"\u8f93\u5165": 564,
|
| 563 |
+
"\u5386\u53f2": 565,
|
| 564 |
+
"\u5206\u89e3": 566,
|
| 565 |
+
"\u5206": 567,
|
| 566 |
+
"\u4fe1\u53f7": 568,
|
| 567 |
+
"\u8c03\u6574": 569,
|
| 568 |
+
"\u518d": 570,
|
| 569 |
+
"\u6210\u679c": 571,
|
| 570 |
+
"\u5b9e\u4f8b": 572,
|
| 571 |
+
"\u8ba1\u7b97": 573,
|
| 572 |
+
"\u77e5\u8bc6": 574,
|
| 573 |
+
"4": 575,
|
| 574 |
+
"\u4e00\u822c": 576,
|
| 575 |
+
"\u6d77\u6d0b": 577,
|
| 576 |
+
"\u6cb9\u6c14": 578,
|
| 577 |
+
"\u8d44\u6e90": 579,
|
| 578 |
+
"\u5171\u540c\u5f00\u53d1": 580,
|
| 579 |
+
"\u5165\u624b": 581,
|
| 580 |
+
"\u9610\u8ff0": 582,
|
| 581 |
+
"\u53d9\u8ff0": 583,
|
| 582 |
+
"\u56fd\u9645": 584,
|
| 583 |
+
"\u793e\u4f1a": 585,
|
| 584 |
+
"\u7acb\u6cd5": 586,
|
| 585 |
+
"\u73b0\u6709": 587,
|
| 586 |
+
"\u56fd\u5916": 588,
|
| 587 |
+
"\u5305\u62ec": 589,
|
| 588 |
+
"\u77f3\u6cb9": 590,
|
| 589 |
+
"\u5236\u5ea6": 591,
|
| 590 |
+
"\u7ecf\u9a8c": 592,
|
| 591 |
+
"\u6cd5\u5f8b": 593,
|
| 592 |
+
"\u5b8c\u5584": 594,
|
| 593 |
+
"\u5168\u6587": 595,
|
| 594 |
+
"\u6e05\u695a": 596,
|
| 595 |
+
"\u8d44\u6599": 597,
|
| 596 |
+
"\u4e30\u5bcc": 598,
|
| 597 |
+
"\u4e0d\u5c11": 599,
|
| 598 |
+
"\u5408\u683c": 600,
|
| 599 |
+
"\u6bd5\u4e1a\u8bba\u6587": 601,
|
| 600 |
+
"\u6fc0\u5149": 602,
|
| 601 |
+
"\u4fe1\u9053": 603,
|
| 602 |
+
"\u7f16\u7801": 604,
|
| 603 |
+
"\u8c03\u5236": 605,
|
| 604 |
+
"\u77e9\u9635": 606,
|
| 605 |
+
"\u6784\u9020": 607,
|
| 606 |
+
"\u8f6f": 608,
|
| 607 |
+
"\u5b9e\u7269": 609,
|
| 608 |
+
"\u524d\u666f": 610,
|
| 609 |
+
"\u80e1\u4e54\u6728": 611,
|
| 610 |
+
"\u4e0d\u4ec5": 612,
|
| 611 |
+
"\u9a6c\u514b\u601d\u4e3b\u4e49": 613,
|
| 612 |
+
"\u601d\u60f3": 614,
|
| 613 |
+
"\u6587\u5316": 615,
|
| 614 |
+
"\u5ba3\u4f20": 616,
|
| 615 |
+
"\u76ee\u524d": 617,
|
| 616 |
+
"\u5f88\u591a": 618,
|
| 617 |
+
"\u96c6\u4e2d": 619,
|
| 618 |
+
"\u6216": 620,
|
| 619 |
+
"\u6bdb\u6cfd\u4e1c\u601d\u60f3": 621,
|
| 620 |
+
"\u5ef6\u5b89": 622,
|
| 621 |
+
"\u65f6\u671f": 623,
|
| 622 |
+
"\u7279\u5b9a": 624,
|
| 623 |
+
"\u4e0d\u662f": 625,
|
| 624 |
+
"\u5386\u53f2\u80cc\u666f": 626,
|
| 625 |
+
"\u601d\u8def": 627,
|
| 626 |
+
"\u6761\u4ef6": 628,
|
| 627 |
+
"\u8d21\u732e": 629,
|
| 628 |
+
"\u9996\u5148": 630,
|
| 629 |
+
"\u80cc\u666f": 631,
|
| 630 |
+
"\u5185\u5bb9": 632,
|
| 631 |
+
"\u5b9e\u8df5\u6027": 633,
|
| 632 |
+
"\u89c2\u70b9": 634,
|
| 633 |
+
"\u7ed3\u6784\u8bbe\u8ba1": 635,
|
| 634 |
+
"\u5c42\u6b21": 636,
|
| 635 |
+
"\u5f15\u6587": 637,
|
| 636 |
+
"\u5145\u5206": 638,
|
| 637 |
+
"\u5e94": 639,
|
| 638 |
+
"\u63d0\u4ea4": 640,
|
| 639 |
+
"\u70ed\u7535": 641,
|
| 640 |
+
"\u8054\u5408": 642,
|
| 641 |
+
"\u7279\u70b9": 643,
|
| 642 |
+
"\u8fd0\u884c": 644,
|
| 643 |
+
"\u5355\u5143": 645,
|
| 644 |
+
"\u5efa\u6a21": 646,
|
| 645 |
+
"\u4e0d\u786e\u5b9a\u6027": 647,
|
| 646 |
+
"\u65f6\u95f4\u5c3a\u5ea6": 648,
|
| 647 |
+
"\u534f\u8c03": 649,
|
| 648 |
+
"\u4f18\u5316": 650,
|
| 649 |
+
"\u80fd\u91cf": 651,
|
| 650 |
+
"\u7ba1\u7406": 652,
|
| 651 |
+
"\u5f00\u5c55": 653,
|
| 652 |
+
"\u673a\u7ec4": 654,
|
| 653 |
+
"\u53d1\u7535": 655,
|
| 654 |
+
"\u5305\u542b": 656,
|
| 655 |
+
"\u7ed9": 657,
|
| 656 |
+
"\u5236": 658,
|
| 657 |
+
"\u968f": 659,
|
| 658 |
+
"\u8d1f\u8f7d": 660,
|
| 659 |
+
"\u7387": 661,
|
| 660 |
+
"\u53d8\u5316": 662,
|
| 661 |
+
"\u533a\u95f4": 663,
|
| 662 |
+
"\u5bf9\u7cfb\u7edf": 664,
|
| 663 |
+
"\u4e4b\u524d": 665,
|
| 664 |
+
"\u8003\u8651": 666,
|
| 665 |
+
"\u53ef": 667,
|
| 666 |
+
"\u518d\u751f\u80fd\u6e90": 668,
|
| 667 |
+
"\u51fa\u529b": 669,
|
| 668 |
+
"\u786e\u5b9a": 670,
|
| 669 |
+
"\u7075\u654f\u5ea6": 671,
|
| 670 |
+
"\u5316": 672,
|
| 671 |
+
"\u8c03\u5ea6": 673,
|
| 672 |
+
"\u8f6c\u5316": 674,
|
| 673 |
+
"\u4e24\u4e2a": 675,
|
| 674 |
+
"\u6df7\u5408": 676,
|
| 675 |
+
"\u6c42\u89e3": 677,
|
| 676 |
+
"\u5b9e\u65f6": 678,
|
| 677 |
+
"\u9636\u6bb5": 679,
|
| 678 |
+
"\u6839\u636e": 680,
|
| 679 |
+
"\u5185": 681,
|
| 680 |
+
"\u9152\u74f6\u76d6": 682,
|
| 681 |
+
"\u5370\u5237": 683,
|
| 682 |
+
"\u677f": 684,
|
| 683 |
+
"\u7f3a\u9677": 685,
|
| 684 |
+
"\u89c6\u89c9": 686,
|
| 685 |
+
"\u7f3a\u5931": 687,
|
| 686 |
+
"\u673a\u5668": 688,
|
| 687 |
+
"\u65b9\u5f0f": 689,
|
| 688 |
+
"\u4ee5\u4e0b": 690,
|
| 689 |
+
"\u51e0": 691,
|
| 690 |
+
".": 692,
|
| 691 |
+
"\u6210\u50cf": 693,
|
| 692 |
+
"\u56fe\u50cf\u5904\u7406": 694,
|
| 693 |
+
"\u8f6f\u4ef6\u8bbe\u8ba1": 695,
|
| 694 |
+
"\u4ece\u800c": 696,
|
| 695 |
+
"\u8868\u9762": 697,
|
| 696 |
+
"\u5e38\u89c1": 698,
|
| 697 |
+
"\u65e2\u6709": 699,
|
| 698 |
+
"\u53c8": 700,
|
| 699 |
+
"\u64b0\u5199": 701,
|
| 700 |
+
"\u6761\u4f8b": 702,
|
| 701 |
+
"\u79d1\u6280": 703,
|
| 702 |
+
"\u4e8e": 704,
|
| 703 |
+
"\u65b0\u80fd\u6e90": 705,
|
| 704 |
+
"\u98ce\u673a": 706,
|
| 705 |
+
"\u6545\u969c\u8bca\u65ad": 707,
|
| 706 |
+
"\u57f9\u517b": 708,
|
| 707 |
+
"\u65b9\u5411": 709,
|
| 708 |
+
"\u7eea\u8bba": 710,
|
| 709 |
+
"\u63cf\u8ff0": 711,
|
| 710 |
+
"\u7cbe\u786e": 712,
|
| 711 |
+
"\u5b50": 713,
|
| 712 |
+
"\u8fa8\u8bc6": 714,
|
| 713 |
+
"K": 715,
|
| 714 |
+
"L": 716,
|
| 715 |
+
"\u98ce\u7535": 717,
|
| 716 |
+
"\u4e88\u4ee5": 718,
|
| 717 |
+
"\u72b6\u6001": 719,
|
| 718 |
+
"\u8bc4\u4f30": 720,
|
| 719 |
+
"\u6df1\u523b": 721,
|
| 720 |
+
"\u8ba4\u8bc6": 722,
|
| 721 |
+
"\u5b8c\u6574": 723,
|
| 722 |
+
"\u8bed\u8a00": 724,
|
| 723 |
+
"\u78f7\u9178": 725,
|
| 724 |
+
"\u8f9b\u916f": 726,
|
| 725 |
+
"\u5408\u6210": 727,
|
| 726 |
+
"\u5de5\u4e1a": 728,
|
| 727 |
+
"\u3000": 729,
|
| 728 |
+
"\u4e09": 730,
|
| 729 |
+
"\u539f\u6599": 731,
|
| 730 |
+
"\u50ac\u5316\u5242": 732,
|
| 731 |
+
"\u7ecf\u8fc7": 733,
|
| 732 |
+
"\u5148": 734,
|
| 733 |
+
"\u90e8\u5206": 735,
|
| 734 |
+
"\u916f\u5316": 736,
|
| 735 |
+
"\u4ea7\u7269": 737,
|
| 736 |
+
"\u5de5\u827a": 738,
|
| 737 |
+
"\u7b80\u5355": 739,
|
| 738 |
+
"\u53ef\u63a7": 740,
|
| 739 |
+
"\u76f8\u5bf9": 741,
|
| 740 |
+
"\u8bba\u636e": 742,
|
| 741 |
+
"\u53ef\u9760": 743,
|
| 742 |
+
"\u53ef\u4fe1": 744,
|
| 743 |
+
"\u6761\u7406\u6e05\u6670": 745,
|
| 744 |
+
"\u7ed3\u679c": 746,
|
| 745 |
+
"\u53c2\u8003\u4ef7\u503c": 747,
|
| 746 |
+
"\u5df2\u7ecf": 748,
|
| 747 |
+
"\u7533\u8bf7": 749,
|
| 748 |
+
"\u7ec4\u7ec7": 750,
|
| 749 |
+
"\u7535\u8def": 751,
|
| 750 |
+
"\u78c1": 752,
|
| 751 |
+
"\u6574\u6d41": 753,
|
| 752 |
+
"\u5f62\u6210": 754,
|
| 753 |
+
"\u6700\u7ec8": 755,
|
| 754 |
+
"\u5bbd": 756,
|
| 755 |
+
"\u8f6c\u901f": 757,
|
| 756 |
+
"\u8f93\u51fa": 758,
|
| 757 |
+
"\u8f83\u5927": 759,
|
| 758 |
+
"\u5b9e\u7528\u4ef7\u503c": 760,
|
| 759 |
+
"\u52a8\u6001": 761,
|
| 760 |
+
"\u660e\u786e": 762,
|
| 761 |
+
"\u7535\u673a": 763,
|
| 762 |
+
"\u505a": 764,
|
| 763 |
+
"\u6761\u7406\u6e05\u695a": 765,
|
| 764 |
+
"\u8868\u8fbe\u80fd\u529b": 766,
|
| 765 |
+
"\u4ece\u6587\u4e2d": 767,
|
| 766 |
+
"\u770b\u51fa": 768,
|
| 767 |
+
"\u7b14\u8005": 769,
|
| 768 |
+
"\u8bfe\u9898": 770,
|
| 769 |
+
"\u7406\u8bba\u77e5\u8bc6": 771,
|
| 770 |
+
"\u7ed3\u5408\u5b9e\u9645": 772,
|
| 771 |
+
"\u67e5\u9605": 773,
|
| 772 |
+
"\u6587\u732e\u8d44\u6599": 774,
|
| 773 |
+
"\u9759\u6001": 775,
|
| 774 |
+
"\u8fd0\u52a8": 776,
|
| 775 |
+
"\u63d0\u53d6": 777,
|
| 776 |
+
"\u8ddf\u8e2a": 778,
|
| 777 |
+
"\u8def\u7ebf": 779,
|
| 778 |
+
"\u8bba\u6587\u683c\u5f0f": 780,
|
| 779 |
+
"\u79bb": 781,
|
| 780 |
+
"\u8f74\u5f0f": 782,
|
| 781 |
+
"\u5207\u5272": 783,
|
| 782 |
+
"\u8bd5\u9a8c": 784,
|
| 783 |
+
"\u7efc\u5408": 785,
|
| 784 |
+
"\u8981": 786,
|
| 785 |
+
"\u76ee\u7684": 787,
|
| 786 |
+
"\u7efc\u8ff0": 788,
|
| 787 |
+
"\u7f3a\u4e4f": 789,
|
| 788 |
+
"\u9002\u7528": 790,
|
| 789 |
+
"\u6d41\u4f53": 791,
|
| 790 |
+
"\u52a8\u529b\u5b66": 792,
|
| 791 |
+
"\u8f74": 793,
|
| 792 |
+
"\u8fd9\u4e9b": 794,
|
| 793 |
+
"\u5bf9\u6bd4": 795,
|
| 794 |
+
"\u5355": 796,
|
| 795 |
+
"\u5206\u522b": 797,
|
| 796 |
+
"\u8f85\u52a9": 798,
|
| 797 |
+
"\u96be\u6613": 799,
|
| 798 |
+
"\u6d89\u53ca": 800,
|
| 799 |
+
"\u52b1\u78c1": 801,
|
| 800 |
+
"\u76f4\u7ebf": 802,
|
| 801 |
+
"\u60ac\u6d6e": 803,
|
| 802 |
+
"\u540c\u6b65": 804,
|
| 803 |
+
"\u7535\u52a8\u673a": 805,
|
| 804 |
+
"\u673a\u7406": 806,
|
| 805 |
+
"\u78c1\u60ac\u6d6e": 807,
|
| 806 |
+
"\u795e\u7ecf\u7f51\u7edc": 808,
|
| 807 |
+
"\u7f51\u7edc": 809,
|
| 808 |
+
"\u89c4\u6a21": 810,
|
| 809 |
+
"\u5f62\u5f0f": 811,
|
| 810 |
+
"\u4e09\u5c42": 812,
|
| 811 |
+
"BP": 813,
|
| 812 |
+
"\u5b66\u4e60": 814,
|
| 813 |
+
"\u6539\u5584": 815,
|
| 814 |
+
"\u4e3b\u52a8": 816,
|
| 815 |
+
"\u8f74\u627f": 817,
|
| 816 |
+
"\u8f6c\u5b50": 818,
|
| 817 |
+
"\u632f\u52a8": 819,
|
| 818 |
+
"\u6b64\u57fa\u7840": 820,
|
| 819 |
+
"\u91cd\u590d": 821,
|
| 820 |
+
"\u76f8\u7ed3\u5408": 822,
|
| 821 |
+
"\u590d\u5408": 823,
|
| 822 |
+
"\u63d0": 824,
|
| 823 |
+
"\u7b56\u7565": 825,
|
| 824 |
+
"\u76f8\u5e94": 826,
|
| 825 |
+
"\u6838\u5fc3": 827,
|
| 826 |
+
"\u6570\u5b57": 828,
|
| 827 |
+
"\u826f\u597d": 829,
|
| 828 |
+
"\u7535\u6c14\u5de5\u7a0b": 830,
|
| 829 |
+
"\u8c03\u7814": 831,
|
| 830 |
+
"\u6d3b\u6027": 832,
|
| 831 |
+
"\u521d\u6b65": 833,
|
| 832 |
+
"\u4e00": 834,
|
| 833 |
+
"\u671f\u95f4": 835,
|
| 834 |
+
"\u53d1\u8868": 836,
|
| 835 |
+
"8": 837,
|
| 836 |
+
"\u7bc7": 838,
|
| 837 |
+
"\u4e00\u9879": 839,
|
| 838 |
+
"\u56de\u5f52": 840,
|
| 839 |
+
"\u65f6\u6548": 841,
|
| 840 |
+
"7075": 842,
|
| 841 |
+
"\u70ed\u5904\u7406": 843,
|
| 842 |
+
"\u5e38\u6e29": 844,
|
| 843 |
+
"\u4f4e\u6e29": 845,
|
| 844 |
+
"\u51b2\u51fb": 846,
|
| 845 |
+
"\u7528": 847,
|
| 846 |
+
"\u5148\u8fdb": 848,
|
| 847 |
+
"\u65ad\u53e3": 849,
|
| 848 |
+
"\u89c2\u5bdf": 850,
|
| 849 |
+
"\u6d4b\u91cf": 851,
|
| 850 |
+
"\u6240\u5f97": 852,
|
| 851 |
+
"\u540c\u7c7b": 853,
|
| 852 |
+
"\u53c2\u8003": 854,
|
| 853 |
+
"\u6240\u5b66": 855,
|
| 854 |
+
"\u89e3\u91ca": 856,
|
| 855 |
+
"\u79d1\u5b66": 857,
|
| 856 |
+
"\u73b0\u8c61": 858,
|
| 857 |
+
"\u9519\u8bef": 859,
|
| 858 |
+
"\u603b\u4f53": 860,
|
| 859 |
+
"\u66f4\u52a0": 861,
|
| 860 |
+
"\u591a\u6e90": 862,
|
| 861 |
+
"\u5fae\u7f51": 863,
|
| 862 |
+
"\u80fd\u6e90": 864,
|
| 863 |
+
"\u6784\u5efa": 865,
|
| 864 |
+
"\u4f20\u8f93": 866,
|
| 865 |
+
"\u8def\u7531": 867,
|
| 866 |
+
"\u62d3\u6251": 868,
|
| 867 |
+
"\u5206\u914d": 869,
|
| 868 |
+
"\u667a\u80fd": 870,
|
| 869 |
+
"\u4f53": 871,
|
| 870 |
+
"\u7fa4": 872,
|
| 871 |
+
"\u6709\u6548\u6027": 873,
|
| 872 |
+
"\u7406\u8bba\u4f9d\u636e": 874,
|
| 873 |
+
"\u53ef\u884c": 875,
|
| 874 |
+
"\u4eba\u4f53": 876,
|
| 875 |
+
"\u4e0a\u80a2": 877,
|
| 876 |
+
"\u808c\u8089": 878,
|
| 877 |
+
"\u529b": 879,
|
| 878 |
+
"\u8bad\u7ec3": 880,
|
| 879 |
+
"\u9488\u5bf9\u6027": 881,
|
| 880 |
+
"\u5b9e\u9645\u610f\u4e49": 882,
|
| 881 |
+
"\u51c6\u786e\u6027": 883,
|
| 882 |
+
"\u89e3\u51b3": 884,
|
| 883 |
+
"\u5f88": 885,
|
| 884 |
+
"\u9c81\u68d2\u6027": 886,
|
| 885 |
+
"\u80fd": 887,
|
| 886 |
+
"\u53cd\u63a8": 888,
|
| 887 |
+
"\u6765": 889,
|
| 888 |
+
"PMLSM": 890,
|
| 889 |
+
"\u77e2\u91cf": 891,
|
| 890 |
+
"\u4f4d\u7f6e": 892,
|
| 891 |
+
"\u660e\u663e\u63d0\u9ad8": 893,
|
| 892 |
+
"\u9002\u5e94": 894,
|
| 893 |
+
"\u5f84\u5411": 895,
|
| 894 |
+
"\u4ea7\u54c1": 896,
|
| 895 |
+
"\u88c5\u914d": 897,
|
| 896 |
+
"\u4f5c\u4e1a": 898,
|
| 897 |
+
"\u5236\u9020": 899,
|
| 898 |
+
"\u6210\u672c": 900,
|
| 899 |
+
"\u5360": 901,
|
| 900 |
+
"\u6bd4": 902,
|
| 901 |
+
"\u751f\u4ea7": 903,
|
| 902 |
+
"\u5468\u671f": 904,
|
| 903 |
+
"\u5fc5\u8981": 905,
|
| 904 |
+
"\u4eba": 906,
|
| 905 |
+
"\u56e0": 907,
|
| 906 |
+
"\u52a0\u5de5": 908,
|
| 907 |
+
"\u4e2d\u5fc3": 909,
|
| 908 |
+
"\u5916": 910,
|
| 909 |
+
"\u8212\u9002\u5ea6": 911,
|
| 910 |
+
"\u5f88\u5f3a": 912,
|
| 911 |
+
"\u7c7b\u4f3c": 913,
|
| 912 |
+
"\u6280\u672f\u624b\u6bb5": 914,
|
| 913 |
+
"\u73b0\u573a": 915,
|
| 914 |
+
"\u96f6\u90e8\u4ef6": 916,
|
| 915 |
+
"\u64cd\u4f5c": 917,
|
| 916 |
+
"\u4eba\u5458": 918,
|
| 917 |
+
"\u52a8\u4f5c": 919,
|
| 918 |
+
"\u521b\u5efa": 920,
|
| 919 |
+
"\u8ddd\u79bb": 921,
|
| 920 |
+
"\u9ad8\u5ea6": 922,
|
| 921 |
+
"\u5206\u7ea7": 923,
|
| 922 |
+
"\u91c7\u96c6": 924,
|
| 923 |
+
"\u6d41\u7a0b": 925,
|
| 924 |
+
"\u7535\u4fe1\u53f7": 926,
|
| 925 |
+
"\u7b49\u7ea7": 927,
|
| 926 |
+
"\u5212\u5206": 928,
|
| 927 |
+
"\u96be\u5ea6": 929,
|
| 928 |
+
"\u5927": 930,
|
| 929 |
+
"\u6307\u6570": 931,
|
| 930 |
+
"\u91cd\u65b0": 932,
|
| 931 |
+
"\u75b2\u52b3": 933,
|
| 932 |
+
"\u6837\u672c": 934,
|
| 933 |
+
"\u71b5": 935,
|
| 934 |
+
"\u505a\u51fa": 936,
|
| 935 |
+
"\u62c5\u8d1f": 937,
|
| 936 |
+
"\u4e13\u95e8": 938,
|
| 937 |
+
"\u683c\u5f0f": 939,
|
| 938 |
+
"\u5b66\u4f4d": 940,
|
| 939 |
+
"\u89c4\u5b9a": 941,
|
| 940 |
+
"\u5b66\u8005": 942,
|
| 941 |
+
"\u9884\u8b66": 943,
|
| 942 |
+
"\u6982\u8ff0": 944,
|
| 943 |
+
"BC": 945,
|
| 944 |
+
"\u623f\u5730\u4ea7": 946,
|
| 945 |
+
"\u72b6\u51b5": 947,
|
| 946 |
+
"\u7cfb\u7edf\u6027": 948,
|
| 947 |
+
"\u5173\u8054": 949,
|
| 948 |
+
"\u6cd5\u5bf9": 950,
|
| 949 |
+
"\u8d22\u52a1\u6307\u6807": 951,
|
| 950 |
+
"\u5171\u540c": 952,
|
| 951 |
+
"\u6784\u6210": 953,
|
| 952 |
+
"\u5206\u6790\u6cd5": 954,
|
| 953 |
+
"\u6307\u6807": 955,
|
| 954 |
+
"\u6743\u91cd": 956,
|
| 955 |
+
"\u7cfb\u6570": 957,
|
| 956 |
+
"\u5747": 958,
|
| 957 |
+
"\u5bf9\u7b56": 959,
|
| 958 |
+
"\u76d1\u7ba1": 960,
|
| 959 |
+
"\u52a0\u5f3a": 961,
|
| 960 |
+
"\u6295\u8d44": 962,
|
| 961 |
+
"\u9879\u76ee": 963,
|
| 962 |
+
"\u8425\u8fd0": 964,
|
| 963 |
+
"\u6d3b\u52a8": 965,
|
| 964 |
+
"\u57ce\u5e02": 966,
|
| 965 |
+
"\u4f9d\u6258": 967,
|
| 966 |
+
"\u8f7d\u4f53": 968,
|
| 967 |
+
"\u592f": 969,
|
| 968 |
+
"\u6269\u6869": 970,
|
| 969 |
+
"\u6869": 971,
|
| 970 |
+
"\u4e09\u7ef4": 972,
|
| 971 |
+
"\u8377\u8f7d": 973,
|
| 972 |
+
"\u7ec4\u5408": 974,
|
| 973 |
+
"\u6570\u503c": 975,
|
| 974 |
+
"\u4fa7": 976,
|
| 975 |
+
"\u7aef": 977,
|
| 976 |
+
"\u603b": 978,
|
| 977 |
+
"\u627f\u8f7d\u80fd\u529b": 979,
|
| 978 |
+
"\u6269\u6563": 980,
|
| 979 |
+
"\u6270\u52a8": 981,
|
| 980 |
+
"\u6700\u4f18": 982,
|
| 981 |
+
"\u89e3\u6790": 983,
|
| 982 |
+
"Zn": 984,
|
| 983 |
+
"\u8150\u8680\u6027": 985,
|
| 984 |
+
"\u91cf": 986,
|
| 985 |
+
"Al": 987,
|
| 986 |
+
"\u5143\u7d20": 988,
|
| 987 |
+
"\u94f8\u9020": 989,
|
| 988 |
+
"\u56db\u79cd": 990,
|
| 989 |
+
"\u6210\u5206": 991,
|
| 990 |
+
"\u663e\u5fae": 992,
|
| 991 |
+
"\u62c9\u4f38": 993,
|
| 992 |
+
"\u9541\u5408\u91d1": 994,
|
| 993 |
+
"\u7535\u5316\u5b66": 995,
|
| 994 |
+
"\u8150\u8680": 996,
|
| 995 |
+
"\u9884\u671f": 997,
|
| 996 |
+
"\u4e66\u5199": 998,
|
| 997 |
+
"\u6807\u51c6": 999,
|
| 998 |
+
"\u521b\u65b0": 1000,
|
| 999 |
+
"\u89c1\u89e3": 1001,
|
| 1000 |
+
"\u6709\u5173": 1002,
|
| 1001 |
+
"\u98ce\u529b": 1003,
|
| 1002 |
+
"\u53d1\u7535\u673a\u7ec4": 1004,
|
| 1003 |
+
"\u65e0\u529f": 1005,
|
| 1004 |
+
"\u53d8\u6d41\u5668": 1006,
|
| 1005 |
+
"\u56db": 1007,
|
| 1006 |
+
"\u76f4\u63a5": 1008,
|
| 1007 |
+
"\u6709\u529f": 1009,
|
| 1008 |
+
"\u964d\u4f4e": 1010,
|
| 1009 |
+
"\u8dcc\u843d": 1011,
|
| 1010 |
+
"\u6700\u5927": 1012,
|
| 1011 |
+
"\u4fdd\u8bc1": 1013,
|
| 1012 |
+
"\u5b89\u5168": 1014,
|
| 1013 |
+
"\u7a33\u5b9a": 1015,
|
| 1014 |
+
"\u63d0\u5347": 1016,
|
| 1015 |
+
"\u5269\u4f59": 1017,
|
| 1016 |
+
"\u5bb9\u91cf": 1018,
|
| 1017 |
+
"\u76f4\u6d41\u7535": 1019,
|
| 1018 |
+
"\u76f4\u6d41": 1020,
|
| 1019 |
+
"\u503c": 1021,
|
| 1020 |
+
"\u538b": 1022,
|
| 1021 |
+
"\u5e45\u503c": 1023,
|
| 1022 |
+
"\u7a33\u5b9a\u6027": 1024,
|
| 1023 |
+
"\u98ce\u7535\u573a": 1025,
|
| 1024 |
+
"\u96c6\u7fa4": 1026,
|
| 1025 |
+
"\u534f\u540c": 1027,
|
| 1026 |
+
"\u8c03\u9891": 1028,
|
| 1027 |
+
"\u8c03\u8282": 1029,
|
| 1028 |
+
"\u5c42": 1030,
|
| 1029 |
+
"\u50a8\u80fd": 1031,
|
| 1030 |
+
"\u53c2\u4e0e": 1032,
|
| 1031 |
+
"\u9891\u7387": 1033,
|
| 1032 |
+
"\u6587\u4e2d": 1034,
|
| 1033 |
+
"\u57fa\u672c\u4e0a": 1035,
|
| 1034 |
+
"\u53d8\u538b\u5668": 1036,
|
| 1035 |
+
"\u7ebf\u5708": 1037,
|
| 1036 |
+
"\u4f20\u9012": 1038,
|
| 1037 |
+
"\u8f74\u5411": 1039,
|
| 1038 |
+
"\u6db2\u538b": 1040,
|
| 1039 |
+
"\u5408\u95f8": 1041,
|
| 1040 |
+
"\u95f4": 1042,
|
| 1041 |
+
"\u63a5\u89e6": 1043,
|
| 1042 |
+
"\u6539\u53d8": 1044,
|
| 1043 |
+
"\u8f7d\u8377": 1045,
|
| 1044 |
+
"\u5927\u5c0f": 1046,
|
| 1045 |
+
"\u5faa\u73af": 1047,
|
| 1046 |
+
"\u6b21\u6570": 1048,
|
| 1047 |
+
"\u6709\u9650\u5143\u6cd5": 1049,
|
| 1048 |
+
"\u53d7\u5230": 1050,
|
| 1049 |
+
"\u53d1\u751f": 1051,
|
| 1050 |
+
"\u53d1\u7535\u673a": 1052,
|
| 1051 |
+
"\u65e0": 1053,
|
| 1052 |
+
"\u4f20\u611f\u5668": 1054,
|
| 1053 |
+
"\u6709\u76ca": 1055,
|
| 1054 |
+
"\u63a2\u7d22": 1056,
|
| 1055 |
+
"\u5df2": 1057,
|
| 1056 |
+
"\u6ee1\u8db3": 1058,
|
| 1057 |
+
"\u4f30\u8ba1": 1059,
|
| 1058 |
+
"\u6240\u63d0": 1060,
|
| 1059 |
+
"10": 1061,
|
| 1060 |
+
"\u8bf4\u670d\u529b": 1062,
|
| 1061 |
+
"\u6b20\u7f3a": 1063,
|
| 1062 |
+
"\u6c7d\u8f66": 1064,
|
| 1063 |
+
"\u6709\u9650\u516c\u53f8": 1065,
|
| 1064 |
+
"\u7814\u5236": 1066,
|
| 1065 |
+
"\u8f66\u6865": 1067,
|
| 1066 |
+
"\u7535\u52a8\u6c7d\u8f66": 1068,
|
| 1067 |
+
"\u7b80\u5316": 1069,
|
| 1068 |
+
"\u5bff\u547d": 1070,
|
| 1069 |
+
"\u53d7\u529b": 1071,
|
| 1070 |
+
"\u4fee\u6b63": 1072,
|
| 1071 |
+
"\u6750\u6599": 1073,
|
| 1072 |
+
"S": 1074,
|
| 1073 |
+
"\u66f2\u7ebf": 1075,
|
| 1074 |
+
"\u5df2\u6709": 1076,
|
| 1075 |
+
"\u6210\u719f": 1077,
|
| 1076 |
+
"\u4f7f\u7528\u5bff\u547d": 1078,
|
| 1077 |
+
"\u5960\u5b9a": 1079,
|
| 1078 |
+
"\u8be6\u5c3d": 1080,
|
| 1079 |
+
"\u53d8\u91cf": 1081,
|
| 1080 |
+
"\u6b64\u5916": 1082,
|
| 1081 |
+
"\u8bc1\u660e": 1083,
|
| 1082 |
+
"\u5408\u7406\u6027": 1084,
|
| 1083 |
+
"\u7efc\u4e0a\u6240\u8ff0": 1085,
|
| 1084 |
+
"\u8ba4\u4e3a": 1086,
|
| 1085 |
+
"\u5b89\u6392": 1087,
|
| 1086 |
+
"\u7136\u540e": 1088,
|
| 1087 |
+
"\u8bc4\u5ba1": 1089,
|
| 1088 |
+
"\u8865\u5145": 1090,
|
| 1089 |
+
"\u5149\u7ea4": 1091,
|
| 1090 |
+
"\u6df1\u5165\u7814\u7a76": 1092,
|
| 1091 |
+
"\u6bcf": 1093,
|
| 1092 |
+
"\u4e0d\u591f": 1094,
|
| 1093 |
+
"\u52a8\u624b": 1095,
|
| 1094 |
+
"\u6545": 1096,
|
| 1095 |
+
"\u8be5\u6587": 1097,
|
| 1096 |
+
"\u89c6\u89d2": 1098,
|
| 1097 |
+
"\u65b0\u9896": 1099,
|
| 1098 |
+
"\u5f97\u5f53": 1100,
|
| 1099 |
+
"\u5de5\u5177": 1101,
|
| 1100 |
+
"\u7fd4\u5b9e": 1102,
|
| 1101 |
+
"\u6db2": 1103,
|
| 1102 |
+
"\u751f\u7269": 1104,
|
| 1103 |
+
"\u7535": 1105,
|
| 1104 |
+
"\u963b\u6297": 1106,
|
| 1105 |
+
"\u5206\u6790\u65b9\u6cd5": 1107,
|
| 1106 |
+
"\u7acb\u9898": 1108,
|
| 1107 |
+
"\u5b9e\u9a8c\u8bbe\u8ba1": 1109,
|
| 1108 |
+
"\u4ea4\u53c9": 1110,
|
| 1109 |
+
"\u9274\u4e8e": 1111,
|
| 1110 |
+
"\u8fd1\u5e74\u6765": 1112,
|
| 1111 |
+
"\u8bba\u70b9": 1113,
|
| 1112 |
+
"\u81ea\u5df1": 1114,
|
| 1113 |
+
"\u663e\u8457": 1115,
|
| 1114 |
+
"\u8bc1\u636e": 1116,
|
| 1115 |
+
"\u8ba4\u5b9a": 1117,
|
| 1116 |
+
"\u53e6\u5916": 1118,
|
| 1117 |
+
"\u6ce8\u91ca": 1119,
|
| 1118 |
+
"\u53c2\u8003\u6587\u732e": 1120,
|
| 1119 |
+
"\u78c1\u901a": 1121,
|
| 1120 |
+
"\u70ed": 1122,
|
| 1121 |
+
"\u78c1\u6781": 1123,
|
| 1122 |
+
"\u6c38\u78c1\u4f53": 1124,
|
| 1123 |
+
"\u635f\u8017": 1125,
|
| 1124 |
+
"\u5f62\u72b6": 1126,
|
| 1125 |
+
"\u6da1\u6d41\u635f\u8017": 1127,
|
| 1126 |
+
"\u901a\u98ce": 1128,
|
| 1127 |
+
"\u6563\u70ed": 1129,
|
| 1128 |
+
"\u68c0\u5bdf\u673a\u5173": 1130,
|
| 1129 |
+
"\u63d0\u8d77": 1131,
|
| 1130 |
+
"\u884c\u653f": 1132,
|
| 1131 |
+
"\u516c\u76ca": 1133,
|
| 1132 |
+
"\u8bc9\u8bbc": 1134,
|
| 1133 |
+
"\u800c": 1135,
|
| 1134 |
+
"\u4fc3\u8fdb": 1136,
|
| 1135 |
+
"\u63a2\u8ba8": 1137,
|
| 1136 |
+
"\u5982": 1138,
|
| 1137 |
+
"\u7ae0": 1139,
|
| 1138 |
+
"\u4e3b\u4f53": 1140,
|
| 1139 |
+
"\u6269\u5927": 1141,
|
| 1140 |
+
"\u5916\u90e8": 1142,
|
| 1141 |
+
"\u673a\u5236": 1143,
|
| 1142 |
+
"\u529f\u5e95": 1144,
|
| 1143 |
+
"\u719f\u7ec3": 1145,
|
| 1144 |
+
"\u5f15\u8bc1": 1146,
|
| 1145 |
+
"\u4e14": 1147,
|
| 1146 |
+
"\u5199\u4f5c\u80fd\u529b": 1148,
|
| 1147 |
+
"\u603b\u4f53\u800c\u8a00": 1149,
|
| 1148 |
+
"\u6b64": 1150,
|
| 1149 |
+
"\u6446\u7ebf": 1151,
|
| 1150 |
+
"\u9f7f": 1152,
|
| 1151 |
+
"\u9525\u9f7f\u8f6e": 1153,
|
| 1152 |
+
"\u7d27\u5bc6": 1154,
|
| 1153 |
+
"\u4f20\u52a8": 1155,
|
| 1154 |
+
"\u66f4": 1156,
|
| 1155 |
+
"\u5927\u578b": 1157,
|
| 1156 |
+
"\u7cbe\u5ea6": 1158,
|
| 1157 |
+
"\u7528\u4e8e": 1159,
|
| 1158 |
+
"Q": 1160,
|
| 1159 |
+
"\u6570\u63a7": 1161,
|
| 1160 |
+
"\u87ba\u65cb": 1162,
|
| 1161 |
+
"\u9f7f\u8f6e": 1163,
|
| 1162 |
+
"\u5207\u524a": 1164,
|
| 1163 |
+
"\u83b7\u5f97": 1165,
|
| 1164 |
+
"\u6709\u5229\u4e8e": 1166,
|
| 1165 |
+
"\u5206\u660e": 1167,
|
| 1166 |
+
"BLDCM": 1168,
|
| 1167 |
+
"\u8f6c\u77e9": 1169,
|
| 1168 |
+
"\u8109\u52a8": 1170,
|
| 1169 |
+
"\u51fd\u6570": 1171,
|
| 1170 |
+
"DTC": 1172,
|
| 1171 |
+
"TSF": 1173,
|
| 1172 |
+
"\u66f4\u597d": 1174,
|
| 1173 |
+
"\u6570\u5b66": 1175,
|
| 1174 |
+
"\u6570\u636e\u6316\u6398": 1176,
|
| 1175 |
+
"\u8fde\u9501": 1177,
|
| 1176 |
+
"\u4e2a": 1178,
|
| 1177 |
+
"\u4e0d\u8db3\u4e4b\u5904": 1179,
|
| 1178 |
+
"\u53ef\u884c\u6027": 1180,
|
| 1179 |
+
"\u6574\u7406": 1181,
|
| 1180 |
+
"\u5404\u4e2a": 1182,
|
| 1181 |
+
"\u6570\u636e\u5e93": 1183,
|
| 1182 |
+
"\u6a21\u5f0f": 1184,
|
| 1183 |
+
"\u6316\u6398": 1185,
|
| 1184 |
+
"\u53ea\u662f": 1186,
|
| 1185 |
+
"\u6df1\u5ea6": 1187,
|
| 1186 |
+
"\u5355\u6676": 1188,
|
| 1187 |
+
"\u670d\u5f79": 1189,
|
| 1188 |
+
"\u590d\u6742": 1190,
|
| 1189 |
+
"\u957f\u671f": 1191,
|
| 1190 |
+
"\u8815\u53d8": 1192,
|
| 1191 |
+
"\u5f62\u8c8c": 1193,
|
| 1192 |
+
"\u6f14\u5316": 1194,
|
| 1193 |
+
"\u754c\u9762": 1195,
|
| 1194 |
+
"\u53d8\u5316\u89c4\u5f8b": 1196,
|
| 1195 |
+
"\u7c89\u672b": 1197,
|
| 1196 |
+
"\u6210\u578b": 1198,
|
| 1197 |
+
"\u771f\u7a7a": 1199,
|
| 1198 |
+
"\u70e7\u7ed3": 1200,
|
| 1199 |
+
"\u63a2\u7a76": 1201,
|
| 1200 |
+
"\u65f6\u95f4": 1202,
|
| 1201 |
+
"Si": 1203,
|
| 1202 |
+
"\u5c01\u88c5": 1204,
|
| 1203 |
+
"\u590d\u5408\u6750\u6599": 1205,
|
| 1204 |
+
"\u8ba8\u8bba": 1206,
|
| 1205 |
+
"\u5c31": 1207,
|
| 1206 |
+
"\u800c\u8a00": 1208,
|
| 1207 |
+
"\u9898\u76ee": 1209,
|
| 1208 |
+
"\u8fd8\u6709": 1210,
|
| 1209 |
+
"\u6765\u8bf4": 1211,
|
| 1210 |
+
"\u6ca1\u6709": 1212,
|
| 1211 |
+
"\u7279\u522b": 1213,
|
| 1212 |
+
"\u6c88\u9633": 1214,
|
| 1213 |
+
"\u5927\u5b66": 1215,
|
| 1214 |
+
"\u6388\u4e88": 1216,
|
| 1215 |
+
"\u7ecf": 1217,
|
| 1216 |
+
"\u5c11\u91cf": 1218,
|
| 1217 |
+
"\u91cf\u5b50": 1219,
|
| 1218 |
+
"\u8bc6\u522b": 1220,
|
| 1219 |
+
"n": 1221,
|
| 1220 |
+
"\u56fe\u50cf": 1222,
|
| 1221 |
+
"\u5b58\u50a8": 1223,
|
| 1222 |
+
"\u5377\u79ef": 1224,
|
| 1223 |
+
"\u96c6": 1225,
|
| 1224 |
+
".%": 1226,
|
| 1225 |
+
"\u6676\u7c92": 1227,
|
| 1226 |
+
"\u5c3a\u5bf8": 1228,
|
| 1227 |
+
"\u7ed9\u51fa": 1229,
|
| 1228 |
+
"\u949b": 1230,
|
| 1229 |
+
"\u6297": 1231,
|
| 1230 |
+
"\u5e7f\u6cdb\u5e94\u7528": 1232,
|
| 1231 |
+
"\u5851\u6027": 1233,
|
| 1232 |
+
"\u9700": 1234,
|
| 1233 |
+
"\u6d41\u53d8": 1235,
|
| 1234 |
+
"\u7f3a\u70b9": 1236,
|
| 1235 |
+
"\u4f5c": 1237,
|
| 1236 |
+
"\u8bc4\u8ff0": 1238,
|
| 1237 |
+
"Ti": 1239,
|
| 1238 |
+
"6Al": 1240,
|
| 1239 |
+
"4V": 1241,
|
| 1240 |
+
"TiC": 1242,
|
| 1241 |
+
"TiB": 1243,
|
| 1242 |
+
"\u589e\u5f3a": 1244,
|
| 1243 |
+
"\u5747\u5300": 1245,
|
| 1244 |
+
"\u6cbf": 1246,
|
| 1245 |
+
"\u521d\u59cb": 1247,
|
| 1246 |
+
"\u03b2": 1248,
|
| 1247 |
+
"\u7ec6\u5316": 1249,
|
| 1248 |
+
"\u8d8b\u52bf": 1250,
|
| 1249 |
+
"\u7ec4\u6210": 1251,
|
| 1250 |
+
"\u5f3a\u5ea6": 1252,
|
| 1251 |
+
"\u4e0b\u964d": 1253,
|
| 1252 |
+
"\u2103": 1254,
|
| 1253 |
+
"s": 1255,
|
| 1254 |
+
"\u5cf0\u503c": 1256,
|
| 1255 |
+
"\u7531": 1257,
|
| 1256 |
+
"MPa": 1258,
|
| 1257 |
+
"5": 1259,
|
| 1258 |
+
"\u672c\u6784": 1260,
|
| 1259 |
+
"\u65b9\u7a0b": 1261,
|
| 1260 |
+
"6": 1262,
|
| 1261 |
+
"7": 1263,
|
| 1262 |
+
"\u56fe": 1264,
|
| 1263 |
+
"\u5bf9\u5e94": 1265,
|
| 1264 |
+
"\u6700\u4f73": 1266,
|
| 1265 |
+
"\u6570\u636e\u5904\u7406": 1267,
|
| 1266 |
+
"\u5145\u5b9e": 1268,
|
| 1267 |
+
"\u5b66\u79d1\u4e13\u4e1a": 1269,
|
| 1268 |
+
"\u4e13\u4e1a\u6280\u80fd": 1270,
|
| 1269 |
+
"\u7ee7\u7535\u5668": 1271,
|
| 1270 |
+
"\u4e3a\u9898": 1272,
|
| 1271 |
+
"\u56fa\u6709\u9891\u7387": 1273,
|
| 1272 |
+
"\u7c27\u7247": 1274,
|
| 1273 |
+
"\u5b89\u88c5": 1275,
|
| 1274 |
+
"\u9650\u4f4d": 1276,
|
| 1275 |
+
"\u7247": 1277,
|
| 1276 |
+
"\u9650\u5236": 1278,
|
| 1277 |
+
"\u4e13\u95e8\u77e5\u8bc6": 1279,
|
| 1278 |
+
"\u949b\u5408\u91d1": 1280,
|
| 1279 |
+
"\u58f0": 1281,
|
| 1280 |
+
"\u53d1\u5c04": 1282,
|
| 1281 |
+
"\u83b7\u53d6": 1283,
|
| 1282 |
+
"\u635f\u4f24": 1284,
|
| 1283 |
+
"\u91d1\u5c5e": 1285,
|
| 1284 |
+
"\u76d1\u6d4b": 1286,
|
| 1285 |
+
"\u8bc6\u522b\u65b9\u6cd5": 1287,
|
| 1286 |
+
"\u63a8\u8350": 1288,
|
| 1287 |
+
"\u00b0": 1289,
|
| 1288 |
+
"\u4e09\u76f8": 1290,
|
| 1289 |
+
"\u89e3": 1291,
|
| 1290 |
+
"\u96f6": 1292,
|
| 1291 |
+
"\u4f1a": 1293,
|
| 1292 |
+
"\u5bfc\u81f4": 1294,
|
| 1293 |
+
"\u4ee3\u66ff": 1295,
|
| 1294 |
+
"SVPWM": 1296,
|
| 1295 |
+
"\u590d\u6742\u5ea6": 1297,
|
| 1296 |
+
"\u5f53\u4eca": 1298,
|
| 1297 |
+
"\u4e16\u754c": 1299,
|
| 1298 |
+
"\u4fa6\u67e5": 1300,
|
| 1299 |
+
"\u7740": 1301,
|
| 1300 |
+
"\u4fb5\u6743": 1302,
|
| 1301 |
+
"\u53ef\u80fd": 1303,
|
| 1302 |
+
"\u635f\u5bb3": 1304,
|
| 1303 |
+
"\u7a0b\u5e8f": 1305,
|
| 1304 |
+
"\u89c4\u5236": 1306,
|
| 1305 |
+
"\u73b0\u884c": 1307,
|
| 1306 |
+
"\u6551\u6d4e": 1308,
|
| 1307 |
+
"\u53f8\u6cd5": 1309,
|
| 1308 |
+
"\u4ee5\u6b64": 1310,
|
| 1309 |
+
"\u610f\u8bc6": 1311,
|
| 1310 |
+
"\u5f3a\u70c8": 1312,
|
| 1311 |
+
"\u4e4b\u4e0a": 1313,
|
| 1312 |
+
"\u53ef\u64cd\u4f5c\u6027": 1314,
|
| 1313 |
+
"\u4f7f\u5f97": 1315,
|
| 1314 |
+
"\u786c\u4ef6": 1316,
|
| 1315 |
+
"B": 1317,
|
| 1316 |
+
"\u51cf\u5c0f": 1318,
|
| 1317 |
+
"\u89c2": 1319,
|
| 1318 |
+
"\u89c6\u57df": 1320,
|
| 1319 |
+
"\u719f\u6089": 1321,
|
| 1320 |
+
"\u57fa\u672c\u539f\u7406": 1322,
|
| 1321 |
+
"\u8bed\u8a00\u8868\u8fbe": 1323,
|
| 1322 |
+
"\u987a\u7545": 1324,
|
| 1323 |
+
"PEO": 1325,
|
| 1324 |
+
"PLA": 1326,
|
| 1325 |
+
"\u56fa\u6001": 1327,
|
| 1326 |
+
"\u805a\u5408\u7269": 1328,
|
| 1327 |
+
"\u7535\u89e3\u8d28": 1329,
|
| 1328 |
+
"\u9502\u79bb\u5b50": 1330,
|
| 1329 |
+
"\u7535\u6c60": 1331,
|
| 1330 |
+
"\u7269": 1332,
|
| 1331 |
+
"LCI": 1333,
|
| 1332 |
+
"\u63ba\u6742": 1334,
|
| 1333 |
+
"\u81ea\u52a8": 1335,
|
| 1334 |
+
"\u7535\u5bfc\u7387": 1336,
|
| 1335 |
+
"\u7ea4\u7ef4": 1337,
|
| 1336 |
+
"\u5374": 1338,
|
| 1337 |
+
"\u4ee5\u4e0a": 1339,
|
| 1338 |
+
"\u8be6\u5b9e": 1340,
|
| 1339 |
+
"\u591a\u5b54": 1341,
|
| 1340 |
+
"\u5f15\u8d77": 1342,
|
| 1341 |
+
"\u9676\u74f7": 1343,
|
| 1342 |
+
"\u6d82\u5c42": 1344,
|
| 1343 |
+
"\u5b8c\u5168": 1345,
|
| 1344 |
+
"\u67e5\u8be2": 1346,
|
| 1345 |
+
"\u7b26\u5408\u5b9e\u9645": 1347,
|
| 1346 |
+
"\u8fc7\u5ea6": 1348,
|
| 1347 |
+
"\u533b\u7597": 1349,
|
| 1348 |
+
"\u8d23\u4efb": 1350,
|
| 1349 |
+
"\u7ef4\u62a4": 1351,
|
| 1350 |
+
"\u60a3\u8005": 1352,
|
| 1351 |
+
"\u7b2c": 1353,
|
| 1352 |
+
"\u4e13\u5bb6": 1354,
|
| 1353 |
+
"\u5b9e\u65bd": 1355,
|
| 1354 |
+
"\u7edf\u4e00": 1356,
|
| 1355 |
+
"\u4e0a\u8ff0": 1357,
|
| 1356 |
+
"\u65b0\u610f": 1358,
|
| 1357 |
+
"\u503c\u5f97": 1359,
|
| 1358 |
+
"\u6838\u5fc3\u90e8\u4ef6": 1360,
|
| 1359 |
+
"\u4e4b\u4e00": 1361,
|
| 1360 |
+
"\u6da1\u8f6e": 1362,
|
| 1361 |
+
"\u589e\u538b\u5668": 1363,
|
| 1362 |
+
"\u6210\u4e3a": 1364,
|
| 1363 |
+
"\u70ed\u70b9": 1365,
|
| 1364 |
+
"\u8bd5\u9a8c\u53f0": 1366,
|
| 1365 |
+
"\u4f9b\u6c14": 1367,
|
| 1366 |
+
"\u51fa\u53e3": 1368,
|
| 1367 |
+
"\u538b\u529b": 1369,
|
| 1368 |
+
"\u6210": 1370,
|
| 1369 |
+
"\u7535\u6c14": 1371,
|
| 1370 |
+
"\u6574\u4e2a": 1372,
|
| 1371 |
+
"\u6709\u5f85": 1373,
|
| 1372 |
+
"\u7535\u529b": 1374,
|
| 1373 |
+
"\u5b66": 1375,
|
| 1374 |
+
"\u4fe1\u53f7\u5904\u7406": 1376,
|
| 1375 |
+
"\u4e00\u4f53\u5316": 1377,
|
| 1376 |
+
"\u81ea\u7531\u5ea6": 1378,
|
| 1377 |
+
"\u5dee\u52a8": 1379,
|
| 1378 |
+
"\u8ba1\u7b97\u516c\u5f0f": 1380,
|
| 1379 |
+
"\u7535\u78c1": 1381,
|
| 1380 |
+
"\u4f4d\u79fb": 1382,
|
| 1381 |
+
"\u8f6d": 1383,
|
| 1382 |
+
"\u8ba1\u7b97\u7ed3\u679c": 1384,
|
| 1383 |
+
"\u53d7": 1385,
|
| 1384 |
+
"\u504f\u79fb": 1386,
|
| 1385 |
+
"\u6267\u884c\u5668": 1387,
|
| 1386 |
+
"\u4f9d\u636e": 1388,
|
| 1387 |
+
"\u7535\u5e73": 1389,
|
| 1388 |
+
"\u5f00\u5173": 1390,
|
| 1389 |
+
"\u56f4\u7ed5": 1391,
|
| 1390 |
+
"\u56fa\u4f53": 1392,
|
| 1391 |
+
"\u5fae\u6ce2": 1393,
|
| 1392 |
+
"\u653e\u5927": 1394,
|
| 1393 |
+
"\u6d88\u7eb3": 1395,
|
| 1394 |
+
"\u5206\u5e03\u5f0f": 1396,
|
| 1395 |
+
"\u7535\u6e90": 1397,
|
| 1396 |
+
"\u7535\u4ef7": 1398,
|
| 1397 |
+
"\u7ecf\u6d4e\u6027": 1399,
|
| 1398 |
+
"\u5229\u7528\u7387": 1400,
|
| 1399 |
+
"\u9002\u4e2d": 1401,
|
| 1400 |
+
"\u5e72\u6270": 1402,
|
| 1401 |
+
"\u79fb\u52a8\u673a\u5668\u4eba": 1403,
|
| 1402 |
+
"\u4fe1\u606f": 1404,
|
| 1403 |
+
"\u673a\u5668\u4eba": 1405,
|
| 1404 |
+
"\u76fe\u6784": 1406,
|
| 1405 |
+
"\u5730\u94c1": 1407,
|
| 1406 |
+
"\u96a7\u9053": 1408,
|
| 1407 |
+
"\u5efa\u8bbe": 1409,
|
| 1408 |
+
"\u4ec0\u4e48": 1410,
|
| 1409 |
+
"\u6216\u8005": 1411,
|
| 1410 |
+
"\u91cd\u5927": 1412,
|
| 1411 |
+
"\u7535\u5de5\u94a2": 1413,
|
| 1412 |
+
"\u65cb\u8f6c": 1414,
|
| 1413 |
+
"\u78c1\u5316": 1415,
|
| 1414 |
+
"\u78c1\u6ede": 1416,
|
| 1415 |
+
"\u5e38\u7528": 1417,
|
| 1416 |
+
"Jiles": 1418,
|
| 1417 |
+
"Atherton": 1419,
|
| 1418 |
+
"\u521d\u59cb\u503c": 1420,
|
| 1419 |
+
"k": 1421,
|
| 1420 |
+
"\u5bc6\u5ea6": 1422,
|
| 1421 |
+
"\u628a": 1423,
|
| 1422 |
+
"\u4e00\u53f0": 1424,
|
| 1423 |
+
"\u94c1\u5fc3": 1425,
|
| 1424 |
+
"\u78c1\u573a\u5f3a\u5ea6": 1426,
|
| 1425 |
+
"\u8bb8\u591a": 1427,
|
| 1426 |
+
"\u5e0c\u671b": 1428,
|
| 1427 |
+
"\u878d\u5408": 1429,
|
| 1428 |
+
"\u5de5\u5b66": 1430,
|
| 1429 |
+
"\u7acb\u8bba": 1431,
|
| 1430 |
+
"\u6709\u636e": 1432,
|
| 1431 |
+
"\u7ae0\u8282": 1433,
|
| 1432 |
+
"\u73af\u6c27\u6811\u8102": 1434,
|
| 1433 |
+
"\u6cbf\u9762": 1435,
|
| 1434 |
+
"\u95ea\u7edc": 1436,
|
| 1435 |
+
"\u6570\u636e\u5206\u6790": 1437,
|
| 1436 |
+
"\u7535\u6781": 1438,
|
| 1437 |
+
"\u538b\u5f3a": 1439,
|
| 1438 |
+
"\u501f\u52a9": 1440,
|
| 1439 |
+
"\u626b\u63cf": 1441,
|
| 1440 |
+
"\u7834\u574f": 1442,
|
| 1441 |
+
"\u504f\u78c1\u4e0b": 1443,
|
| 1442 |
+
"\u5341\u5206": 1444,
|
| 1443 |
+
"\u504f\u78c1": 1445,
|
| 1444 |
+
"\u6709\u65e0": 1446,
|
| 1445 |
+
"\u53d6\u5411": 1447,
|
| 1446 |
+
"\u7845\u94a2\u7247": 1448,
|
| 1447 |
+
"\u5bf9\u79f0": 1449,
|
| 1448 |
+
"\u516c\u5f0f": 1450,
|
| 1449 |
+
"\u53d8": 1451,
|
| 1450 |
+
"\u54cd\u5e94": 1452,
|
| 1451 |
+
"\u62df\u5408": 1453,
|
| 1452 |
+
"\u5f15\u7528": 1454,
|
| 1453 |
+
"\u865a\u62df": 1455,
|
| 1454 |
+
"\u53cc\u9988": 1456,
|
| 1455 |
+
"\u53d8\u6362\u5668": 1457,
|
| 1456 |
+
"\u6d88\u9664": 1458,
|
| 1457 |
+
"\u5206\u91cf": 1459,
|
| 1458 |
+
"\u8c10\u6ce2": 1460,
|
| 1459 |
+
"Matlab": 1461,
|
| 1460 |
+
"\u7f3a\u5c11": 1462,
|
| 1461 |
+
"\u8fbd\u5b81\u7701": 1463,
|
| 1462 |
+
"\u79d1\u6280\u56ed": 1464,
|
| 1463 |
+
"\u524d\u6cbf": 1465,
|
| 1464 |
+
"\u56fd\u5185": 1466,
|
| 1465 |
+
"\u7c92\u5b50": 1467,
|
| 1466 |
+
"\u6307\u51fa": 1468,
|
| 1467 |
+
"\u652f\u6491": 1469,
|
| 1468 |
+
"\u6587\u7b14\u6d41\u7545": 1470,
|
| 1469 |
+
"\u5f0f": 1471,
|
| 1470 |
+
"\u8c03\u901f": 1472,
|
| 1471 |
+
"DS": 1473,
|
| 1472 |
+
"\u4f18\u70b9": 1474,
|
| 1473 |
+
"\u78c1\u8def": 1475,
|
| 1474 |
+
"\u6ce8\u5165": 1476,
|
| 1475 |
+
"\u95ed\u73af": 1477,
|
| 1476 |
+
"\u5236\u4f5c": 1478,
|
| 1477 |
+
"\u6837\u673a": 1479,
|
| 1478 |
+
"\u6709\u7740": 1480,
|
| 1479 |
+
"\u9ad8\u9891": 1481,
|
| 1480 |
+
"\u9002\u5408": 1482,
|
| 1481 |
+
"\u907f\u7a0e": 1483,
|
| 1482 |
+
"\u8d27\u5e01\u653f\u7b56": 1484,
|
| 1483 |
+
"\u7ecf\u8425": 1485,
|
| 1484 |
+
"\u4e1a\u7ee9": 1486,
|
| 1485 |
+
"\u57fa\u672c\u6982\u5ff5": 1487,
|
| 1486 |
+
"\u4ee3\u7406": 1488,
|
| 1487 |
+
"\u66f4\u4e3a": 1489,
|
| 1488 |
+
"\u878d\u8d44": 1490,
|
| 1489 |
+
"\u7ea6\u675f": 1491,
|
| 1490 |
+
"\u5b9e\u52a1": 1492,
|
| 1491 |
+
"\u6765\u770b": 1493,
|
| 1492 |
+
"GIL": 1494,
|
| 1493 |
+
"\u5e03\u7f6e": 1495,
|
| 1494 |
+
"\u5fae\u7c92": 1496,
|
| 1495 |
+
"\u7279\u8272": 1497,
|
| 1496 |
+
"\u5176\u4e2d": 1498,
|
| 1497 |
+
"\u65bd\u52a0": 1499,
|
| 1498 |
+
"\u8f68\u8ff9": 1500,
|
| 1499 |
+
"\u8986\u819c": 1501,
|
| 1500 |
+
"\u539a\u5ea6": 1502,
|
| 1501 |
+
"\u7406\u8bba\u6307\u5bfc": 1503,
|
| 1502 |
+
"\u884c\u6587": 1504,
|
| 1503 |
+
"AGV": 1505,
|
| 1504 |
+
"\u907f\u969c": 1506,
|
| 1505 |
+
"\u884c\u9a76": 1507,
|
| 1506 |
+
"\u969c\u788d\u7269": 1508,
|
| 1507 |
+
"\u51cf\u5c11": 1509,
|
| 1508 |
+
"\u5e8f\u5217": 1510,
|
| 1509 |
+
"\u8f6c\u89d2": 1511,
|
| 1510 |
+
"\u6a21\u7cca\u63a7\u5236": 1512,
|
| 1511 |
+
"\u89c4\u5219": 1513,
|
| 1512 |
+
"\u7efc\u4e0a": 1514,
|
| 1513 |
+
"\u6b63\u5f26": 1515,
|
| 1514 |
+
"\u6fc0\u52b1": 1516,
|
| 1515 |
+
"\u8be6\u7ec6\u5206\u6790": 1517,
|
| 1516 |
+
"\u6ce2": 1518,
|
| 1517 |
+
"\u63a5\u5934": 1519,
|
| 1518 |
+
"\u56fa\u5b9a": 1520,
|
| 1519 |
+
"\u7ba1\u8def": 1521,
|
| 1520 |
+
"\u8fde\u63a5": 1522,
|
| 1521 |
+
"\u5411": 1523,
|
| 1522 |
+
"\u4ecb\u8d28": 1524,
|
| 1523 |
+
"\u9632\u6b62": 1525,
|
| 1524 |
+
"\u4e32\u8054": 1526,
|
| 1525 |
+
"\u6cc4\u6f0f": 1527,
|
| 1526 |
+
"\u6d41\u573a": 1528,
|
| 1527 |
+
"Fluent": 1529,
|
| 1528 |
+
"\u7740\u91cd": 1530,
|
| 1529 |
+
"\u5bc6\u5c01": 1531,
|
| 1530 |
+
"\u95f4\u9699": 1532,
|
| 1531 |
+
"\u63a5\u5730": 1533,
|
| 1532 |
+
"\u5355\u76f8\u63a5\u5730": 1534,
|
| 1533 |
+
"\u5f88\u5927": 1535,
|
| 1534 |
+
"\u6bd4\u4f8b": 1536,
|
| 1535 |
+
"\u94c1\u78c1": 1537,
|
| 1536 |
+
"\u9009\u7ebf": 1538,
|
| 1537 |
+
"\u96f6\u5e8f": 1539,
|
| 1538 |
+
"\u76f8\u4f4d": 1540,
|
| 1539 |
+
"\u5355\u4e00": 1541,
|
| 1540 |
+
"\u51c6\u786e\u5ea6": 1542,
|
| 1541 |
+
"\u6c14\u9699": 1543,
|
| 1542 |
+
"\u5982\u679c": 1544,
|
| 1543 |
+
"\u9519\u522b\u5b57": 1545,
|
| 1544 |
+
"\u533b\u9662": 1546,
|
| 1545 |
+
"\u7528\u6237": 1547,
|
| 1546 |
+
"DC": 1548,
|
| 1547 |
+
"\u5e26\u6765": 1549,
|
| 1548 |
+
"\u6307\u5bfc\u610f\u4e49": 1550,
|
| 1549 |
+
"\u80a1\u4efd": 1551,
|
| 1550 |
+
"\u8d44\u91d1": 1552,
|
| 1551 |
+
"\u4f1a\u8ba1": 1553,
|
| 1552 |
+
"\u6df7\u4e71": 1554,
|
| 1553 |
+
"\u5c24\u5176": 1555,
|
| 1554 |
+
"\u503a\u5238": 1556,
|
| 1555 |
+
"\u5c5e\u6027": 1557,
|
| 1556 |
+
"\u6307\u5bfc": 1558,
|
| 1557 |
+
"\u70bc\u94a2": 1559,
|
| 1558 |
+
"\u8fde\u94f8": 1560,
|
| 1559 |
+
"\u5b9a\u4e49": 1561,
|
| 1560 |
+
"\u89c4\u5212": 1562,
|
| 1561 |
+
"\u6700\u5c0f\u5316": 1563,
|
| 1562 |
+
"\u5206\u6563": 1564,
|
| 1563 |
+
"\u641c\u7d22\u7b97\u6cd5": 1565,
|
| 1564 |
+
"\u6392\u5e8f": 1566,
|
| 1565 |
+
"\u9057\u4f20\u7b97\u6cd5": 1567,
|
| 1566 |
+
"\u968f\u673a": 1568,
|
| 1567 |
+
"\u540c\u5b66": 1569,
|
| 1568 |
+
"\u672c\u95e8": 1570,
|
| 1569 |
+
"\u5bbd\u5e7f": 1571,
|
| 1570 |
+
"\u6dd8\u6d17": 1572,
|
| 1571 |
+
"\u601d\u8def\u6e05\u6670": 1573,
|
| 1572 |
+
"\u6e29\u5347": 1574,
|
| 1573 |
+
"\u9608\u503c": 1575,
|
| 1574 |
+
"\u6a21\u6001": 1576,
|
| 1575 |
+
"means": 1577,
|
| 1576 |
+
"\u805a\u7c7b": 1578,
|
| 1577 |
+
"\u79bb\u6563": 1579,
|
| 1578 |
+
"\u5c11": 1580,
|
| 1579 |
+
"\u63d0\u70bc": 1581,
|
| 1580 |
+
"\u6821\u6b63": 1582,
|
| 1581 |
+
"FV520B": 1583,
|
| 1582 |
+
"\u4e0d\u9508\u94a2": 1584,
|
| 1583 |
+
"\u94dd\u5408\u91d1": 1585,
|
| 1584 |
+
"\u786b\u5316\u7269": 1586,
|
| 1585 |
+
"\u542b": 1587,
|
| 1586 |
+
"\u5dee\u5f02": 1588,
|
| 1587 |
+
"\u6c27\u5316": 1589,
|
| 1588 |
+
"\u819c": 1590,
|
| 1589 |
+
"\u949d\u5316": 1591,
|
| 1590 |
+
"\u6eb6\u89e3": 1592,
|
| 1591 |
+
"\u88c2\u7eb9": 1593,
|
| 1592 |
+
"\u9633\u6781": 1594,
|
| 1593 |
+
"\u5f00\u88c2": 1595,
|
| 1594 |
+
"\u6269\u5c55": 1596,
|
| 1595 |
+
"\u786c\u5ea6": 1597,
|
| 1596 |
+
"\u53f6\u8f6e": 1598,
|
| 1597 |
+
"\u8870\u51cf": 1599,
|
| 1598 |
+
"\u8d8a": 1600,
|
| 1599 |
+
"\u9ad8\u6027\u80fd": 1601,
|
| 1600 |
+
"\u660e\u663e": 1602,
|
| 1601 |
+
"\u52a0\u5feb": 1603,
|
| 1602 |
+
"\u5f3a\u5316": 1604,
|
| 1603 |
+
"\u57fa\u4f53": 1605,
|
| 1604 |
+
"\u9ad8\u4e8e": 1606,
|
| 1605 |
+
"\u6982\u7387": 1607,
|
| 1606 |
+
"\u7269\u6d41\u914d\u9001": 1608,
|
| 1607 |
+
"\u53cc\u5c42": 1609,
|
| 1608 |
+
"\u7a97": 1610,
|
| 1609 |
+
"\u6ee1\u610f\u5ea6": 1611,
|
| 1610 |
+
"\u6392\u7248": 1612,
|
| 1611 |
+
"\u6253\u78e8": 1613,
|
| 1612 |
+
"\u5de5\u4ef6": 1614,
|
| 1613 |
+
"\u81ea\u52a8\u5316": 1615,
|
| 1614 |
+
"\u67d4\u6027": 1616,
|
| 1615 |
+
"\u5b9a\u4f4d": 1617,
|
| 1616 |
+
"\u53f0": 1618,
|
| 1617 |
+
"\u82af": 1619,
|
| 1618 |
+
"\u5939\u5177": 1620,
|
| 1619 |
+
"\u65b9\u6848\u8bbe\u8ba1": 1621,
|
| 1620 |
+
"\u5339\u914d": 1622,
|
| 1621 |
+
"\u8fd9\u79cd": 1623,
|
| 1622 |
+
"\u8fd0\u52a8\u5b66": 1624,
|
| 1623 |
+
"\u5173\u8282": 1625,
|
| 1624 |
+
"\u8c03\u8bd5": 1626,
|
| 1625 |
+
"\u78c1\u82af": 1627,
|
| 1626 |
+
"C": 1628,
|
| 1627 |
+
"\u7b49\u6548": 1629,
|
| 1628 |
+
"\u8ba1\u7b97\u65b9\u6cd5": 1630,
|
| 1629 |
+
"\u7ba1\u9053": 1631,
|
| 1630 |
+
"\u79bb\u5fc3\u6cf5": 1632,
|
| 1631 |
+
"\u51b2\u538b": 1633,
|
| 1632 |
+
"\u710a\u63a5": 1634,
|
| 1633 |
+
"\u53f6\u7247": 1635,
|
| 1634 |
+
"\u7814\u53d1": 1636,
|
| 1635 |
+
"\u79fb\u52a8": 1637,
|
| 1636 |
+
"\u8679\u819c": 1638,
|
| 1637 |
+
"\u4e3a\u6b64": 1639,
|
| 1638 |
+
"\u5b66\u79d1\u524d\u6cbf": 1640,
|
| 1639 |
+
"\u767d": 1641,
|
| 1640 |
+
"\u9152\u74f6": 1642,
|
| 1641 |
+
"\u74f6\u53e3": 1643,
|
| 1642 |
+
"\u5728\u7ebf": 1644,
|
| 1643 |
+
"\u96be\u9898": 1645,
|
| 1644 |
+
"\u6700\u5c0f": 1646,
|
| 1645 |
+
"\u7ea6": 1647,
|
| 1646 |
+
"\u8f68\u9053\u4ea4\u901a": 1648,
|
| 1647 |
+
"\u65ad\u8def\u5668": 1649,
|
| 1648 |
+
"\u540c\u6b65\u63a7\u5236": 1650,
|
| 1649 |
+
"\u64cd\u52a8": 1651,
|
| 1650 |
+
"\u6536\u655b": 1652,
|
| 1651 |
+
"\u69fd": 1653,
|
| 1652 |
+
"\u7535\u67a2": 1654,
|
| 1653 |
+
"\u5e73\u5747": 1655,
|
| 1654 |
+
"\u4e60\u8fd1\u5e73": 1656,
|
| 1655 |
+
"\u65f6\u4ee3": 1657,
|
| 1656 |
+
"\u4e2d\u56fd": 1658,
|
| 1657 |
+
"\u793e\u4f1a\u4e3b\u4e49": 1659,
|
| 1658 |
+
"\u5f53\u4ee3": 1660,
|
| 1659 |
+
"\u524d\u4eba": 1661,
|
| 1660 |
+
"\u542b\u6c34\u7387": 1662,
|
| 1661 |
+
"\u5783\u573e": 1663,
|
| 1662 |
+
"\u5404\u79cd": 1664,
|
| 1663 |
+
"\u73af\u4fdd": 1665,
|
| 1664 |
+
"\u5b9a\u91cf": 1666,
|
| 1665 |
+
"\u4f9b\u5e94\u94fe": 1667,
|
| 1666 |
+
"H": 1668,
|
| 1667 |
+
"\u628a\u63e1": 1669,
|
| 1668 |
+
"\u5f00\u91c7": 1670,
|
| 1669 |
+
"\u76d8": 1671,
|
| 1670 |
+
"\u94e3\u524a": 1672,
|
| 1671 |
+
"\u87ba\u6746\u6cf5": 1673,
|
| 1672 |
+
"\u5373": 1674,
|
| 1673 |
+
"\u5207\u524a\u529b": 1675,
|
| 1674 |
+
"\u81ea\u7531": 1676,
|
| 1675 |
+
"\u957f\u5ea6": 1677,
|
| 1676 |
+
"\u52a0\u5de5\u8fc7\u7a0b": 1678,
|
| 1677 |
+
"\u540e\u7eed": 1679,
|
| 1678 |
+
"\u6280\u80fd": 1680,
|
| 1679 |
+
"\u9ad8\u6821": 1681,
|
| 1680 |
+
"\u793e\u56e2": 1682,
|
| 1681 |
+
"\u79cd\u7c7b": 1683,
|
| 1682 |
+
"\u96be\u4ee5": 1684,
|
| 1683 |
+
"\u7ba1\u7406\u7cfb\u7edf": 1685,
|
| 1684 |
+
"\u67b6\u6784": 1686,
|
| 1685 |
+
"\u4fdd\u7406": 1687,
|
| 1686 |
+
"\u5408\u540c": 1688,
|
| 1687 |
+
"\u5f53\u4e8b\u4eba": 1689,
|
| 1688 |
+
"\u6743\u5229\u4e49\u52a1": 1690,
|
| 1689 |
+
"\u6001\u5ea6": 1691,
|
| 1690 |
+
"\u7814\u7a76\u8bfe\u9898": 1692,
|
| 1691 |
+
"\u7f13\u51b2": 1693,
|
| 1692 |
+
"\u521a\u5ea6": 1694,
|
| 1693 |
+
"\u53cd": 1695,
|
| 1694 |
+
"\u9ad8\u6548": 1696,
|
| 1695 |
+
"\u8272\u8c31": 1697,
|
| 1696 |
+
"\u8003\u5bdf": 1698,
|
| 1697 |
+
"\u6d41\u52a8": 1699,
|
| 1698 |
+
"\u6d41\u901f": 1700,
|
| 1699 |
+
"\u67f1": 1701,
|
| 1700 |
+
"mm": 1702,
|
| 1701 |
+
"\u00d7": 1703,
|
| 1702 |
+
":": 1704,
|
| 1703 |
+
"min": 1705,
|
| 1704 |
+
"\u65e0\u7ebf": 1706,
|
| 1705 |
+
"\u5e7f\u9614": 1707,
|
| 1706 |
+
"\u5206\u7c07": 1708,
|
| 1707 |
+
"\u8282\u70b9": 1709,
|
| 1708 |
+
"\u514d\u75ab": 1710,
|
| 1709 |
+
"\u6240\u6709": 1711,
|
| 1710 |
+
"\u5b83": 1712,
|
| 1711 |
+
"\u751f\u5b58": 1713,
|
| 1712 |
+
"\u63a8\u7406": 1714,
|
| 1713 |
+
"\u884c\u6587\u6d41\u7545": 1715,
|
| 1714 |
+
"\u5145\u7535": 1716,
|
| 1715 |
+
"\u5408\u9002": 1717,
|
| 1716 |
+
"\u4f9b\u7535\u7cfb\u7edf": 1718,
|
| 1717 |
+
"\u539f\u8fb9": 1719,
|
| 1718 |
+
"\u95f4\u8ddd": 1720,
|
| 1719 |
+
"\u8fdb\u800c": 1721,
|
| 1720 |
+
"\u5e76\u8054": 1722,
|
| 1721 |
+
"\u6587\u7406": 1723,
|
| 1722 |
+
"\u9502\u7535\u6c60": 1724,
|
| 1723 |
+
"Fe": 1725,
|
| 1724 |
+
"Ga": 1726,
|
| 1725 |
+
"\u88c5\u7f6e": 1727,
|
| 1726 |
+
"\u60ac\u81c2": 1728,
|
| 1727 |
+
"\u8f93\u51fa\u529f\u7387": 1729,
|
| 1728 |
+
"\u4f38\u7f29": 1730,
|
| 1729 |
+
"\u60ac\u81c2\u6881": 1731,
|
| 1730 |
+
"\u5f2f\u66f2": 1732,
|
| 1731 |
+
"\u5200\u5177": 1733,
|
| 1732 |
+
"\u4f4e\u901f": 1734,
|
| 1733 |
+
"\u7406\u89e3": 1735,
|
| 1734 |
+
"\u5c0f\u578b": 1736,
|
| 1735 |
+
"\u5e94\u8be5": 1737,
|
| 1736 |
+
"\u51e0\u4e4e": 1738,
|
| 1737 |
+
"\u8fc7\u4e8e": 1739,
|
| 1738 |
+
"\u5219": 1740,
|
| 1739 |
+
"\u8026\u5408": 1741,
|
| 1740 |
+
"\u5173\u952e\u6280\u672f": 1742,
|
| 1741 |
+
"\u5c55\u793a": 1743,
|
| 1742 |
+
"\u8f83\u5dee": 1744,
|
| 1743 |
+
"\u7a7a\u6c14": 1745,
|
| 1744 |
+
"\u5e73\u8861": 1746,
|
| 1745 |
+
"\u2014": 1747,
|
| 1746 |
+
"\u5546\u4e1a\u79d8\u5bc6": 1748,
|
| 1747 |
+
"\u6297\u8fa9": 1749,
|
| 1748 |
+
"\u5256\u6790": 1750,
|
| 1749 |
+
"\u73b0\u72b6\u53ca": 1751,
|
| 1750 |
+
"\u610f\u601d": 1752,
|
| 1751 |
+
"\u7ee9\u6548\u8003\u6838": 1753,
|
| 1752 |
+
"\u7ef4\u5ea6": 1754,
|
| 1753 |
+
"\u5386\u7a0b": 1755,
|
| 1754 |
+
"\u8230\u8f7d": 1756,
|
| 1755 |
+
"\u6297\u51b2\u51fb": 1757,
|
| 1756 |
+
"\u7f13\u51b2\u5668": 1758,
|
| 1757 |
+
"0.3": 1759,
|
| 1758 |
+
"\u56e0\u5b50": 1760,
|
| 1759 |
+
"\u6587\u672c": 1761,
|
| 1760 |
+
"\u5176\u4ed6": 1762,
|
| 1761 |
+
"\u6324\u538b": 1763,
|
| 1762 |
+
"\u7ecf\u5178": 1764,
|
| 1763 |
+
"\u78c1\u963b": 1765,
|
| 1764 |
+
"\u4e00\u6b3e": 1766,
|
| 1765 |
+
"\u954d": 1767,
|
| 1766 |
+
"\u77ed\u8def": 1768,
|
| 1767 |
+
"\u52a8\u529b": 1769,
|
| 1768 |
+
"\u6ce2\u5f62": 1770,
|
| 1769 |
+
"\u4f53\u79ef": 1771,
|
| 1770 |
+
"\u7535\u78c1\u573a": 1772,
|
| 1771 |
+
"\u52a0\u8f7d": 1773,
|
| 1772 |
+
"\u5f62": 1774,
|
| 1773 |
+
"\u5e94\u53d8": 1775,
|
| 1774 |
+
"\u73af\u7f51\u67dc": 1776,
|
| 1775 |
+
"\u6b63": 1777,
|
| 1776 |
+
"\u5f27\u5149": 1778,
|
| 1777 |
+
"\u6700": 1779,
|
| 1778 |
+
"\u5c31\u662f": 1780,
|
| 1779 |
+
"\u4ecd\u7136": 1781,
|
| 1780 |
+
"\u6a61\u80f6": 1782,
|
| 1781 |
+
"\u9ad8\u538b": 1783,
|
| 1782 |
+
"\u6db2\u4f53": 1784,
|
| 1783 |
+
"\u529b\u5b66": 1785,
|
| 1784 |
+
"\u78e8\u635f": 1786,
|
| 1785 |
+
"\u4e01\u8148\u6a61\u80f6": 1787,
|
| 1786 |
+
"\u6c1f\u6a61\u80f6": 1788,
|
| 1787 |
+
"\u73af\u5df1\u70f7": 1789,
|
| 1788 |
+
"\u8001\u5316": 1790,
|
| 1789 |
+
"\u6eb6\u80c0": 1791,
|
| 1790 |
+
"\u6469\u64e6": 1792,
|
| 1791 |
+
"\u6d53\u5ea6": 1793,
|
| 1792 |
+
"\u9884": 1794,
|
| 1793 |
+
"\u8bd5\u6837": 1795,
|
| 1794 |
+
"\u65ad\u88c2": 1796,
|
| 1795 |
+
"\u76f8\u540c": 1797,
|
| 1796 |
+
"\u6469\u64e6\u7cfb\u6570": 1798,
|
| 1797 |
+
"\u56fe\u6807": 1799,
|
| 1798 |
+
"\u5171\u4eab": 1800,
|
| 1799 |
+
"\u5c1a\u53ef": 1801,
|
| 1800 |
+
"\u5f97": 1802,
|
| 1801 |
+
"\u7b2c\u56db\u7ae0": 1803,
|
| 1802 |
+
"\u56db\u4e2a": 1804,
|
| 1803 |
+
"\u867d\u7136": 1805,
|
| 1804 |
+
"\u7b2c\u4e09\u7ae0": 1806,
|
| 1805 |
+
"\u8fd8\u662f": 1807,
|
| 1806 |
+
"\u7528\u8bed": 1808,
|
| 1807 |
+
"\u5fae\u6e90": 1809,
|
| 1808 |
+
"\u662f\u5426": 1810,
|
| 1809 |
+
"\u8868": 1811,
|
| 1810 |
+
"\u94a2\u7bb1": 1812,
|
| 1811 |
+
"\u62f1\u6865": 1813,
|
| 1812 |
+
"\u65bd\u5de5": 1814,
|
| 1813 |
+
"\u62f1": 1815,
|
| 1814 |
+
"\u6263\u5854": 1816,
|
| 1815 |
+
"\u6c11\u4e3b\u515a\u6d3e": 1817,
|
| 1816 |
+
"\u653f\u6cbb": 1818,
|
| 1817 |
+
"\u65e0\u6cd5": 1819,
|
| 1818 |
+
"\u56e0\u4e3a": 1820,
|
| 1819 |
+
"\u5fc5\u987b": 1821,
|
| 1820 |
+
"\u6709\u673a": 1822,
|
| 1821 |
+
"\u519c\u4e1a": 1823,
|
| 1822 |
+
"\u751f\u6001": 1824,
|
| 1823 |
+
"\u79d1\u6280\u8fdb\u6b65": 1825,
|
| 1824 |
+
"\u51fa\u8be5": 1826,
|
| 1825 |
+
"\u5927\u90e8\u5206": 1827,
|
| 1826 |
+
"\u8868\u8fbe\u6e05\u6670": 1828,
|
| 1827 |
+
"\u8f66": 1829,
|
| 1828 |
+
"\u8def\u9762": 1830,
|
| 1829 |
+
"\u8f66\u8f86": 1831,
|
| 1830 |
+
"\u566a\u58f0": 1832,
|
| 1831 |
+
"\u54c1\u724c": 1833,
|
| 1832 |
+
"\u5e38\u89c4": 1834,
|
| 1833 |
+
"\u5355\u4f4d": 1835,
|
| 1834 |
+
"\u4f8b\u5982": 1836,
|
| 1835 |
+
"\u6458\u8981": 1837,
|
| 1836 |
+
"\u4e4b": 1838,
|
| 1837 |
+
"\u652f\u6301": 1839,
|
| 1838 |
+
"\u91c7\u8d2d": 1840,
|
| 1839 |
+
"\u7269\u6599": 1841,
|
| 1840 |
+
"\u98ce\u9669\u7ba1\u7406": 1842,
|
| 1841 |
+
"\u5546\u4e1a\u94f6\u884c": 1843,
|
| 1842 |
+
"\u5ba1\u8ba1": 1844,
|
| 1843 |
+
"GS": 1845,
|
| 1844 |
+
"\u7b26\u53f7": 1846,
|
| 1845 |
+
"\u6811": 1847,
|
| 1846 |
+
"ANSYS": 1848,
|
| 1847 |
+
"\u5ea6": 1849,
|
| 1848 |
+
"\u76f4\u6d41\u7535\u673a": 1850,
|
| 1849 |
+
"\u5f53": 1851,
|
| 1850 |
+
"\u529f\u7387\u5bc6\u5ea6": 1852,
|
| 1851 |
+
"\u65e0\u5237": 1853,
|
| 1852 |
+
"\u6c11\u65cf": 1854,
|
| 1853 |
+
"\u624d": 1855,
|
| 1854 |
+
"\u7cbe\u795e": 1856,
|
| 1855 |
+
"\u4ef7\u503c\u89c2": 1857,
|
| 1856 |
+
"\u5c5e\u4e8e": 1858,
|
| 1857 |
+
"\u76f8\u6bd4": 1859,
|
| 1858 |
+
"\u4f18\u52bf": 1860,
|
| 1859 |
+
"\u9694\u79bb": 1861,
|
| 1860 |
+
"\u4e3b": 1862,
|
| 1861 |
+
"\u53cc\u76ee": 1863,
|
| 1862 |
+
"\u7acb\u4f53": 1864,
|
| 1863 |
+
"\u6293\u53d6": 1865,
|
| 1864 |
+
"\u5b66\u672f\u7814\u7a76": 1866,
|
| 1865 |
+
"\u76f8\u673a": 1867,
|
| 1866 |
+
"\u6807\u5b9a": 1868,
|
| 1867 |
+
"\u6ee4\u6ce2": 1869,
|
| 1868 |
+
"\u4f18\u4e8e": 1870,
|
| 1869 |
+
"\u5706\u67f1\u5f62": 1871,
|
| 1870 |
+
"\u5404\u7c7b": 1872,
|
| 1871 |
+
"\u503a\u52a1": 1873,
|
| 1872 |
+
"\u627e\u5230": 1874,
|
| 1873 |
+
"\u5047\u8bbe": 1875,
|
| 1874 |
+
"\u7a33\u5065\u6027": 1876,
|
| 1875 |
+
"\u94a2": 1877,
|
| 1876 |
+
"\u8f6f\u96c6": 1878,
|
| 1877 |
+
"\u76f4\u89c9": 1879,
|
| 1878 |
+
"\u6570": 1880,
|
| 1879 |
+
"Choquet": 1881,
|
| 1880 |
+
"\u903c\u8fd1": 1882,
|
| 1881 |
+
"\u7406\u60f3": 1883,
|
| 1882 |
+
"TOPSIS": 1884,
|
| 1883 |
+
"\u51b3\u7b56": 1885,
|
| 1884 |
+
"\u79ef\u5206": 1886,
|
| 1885 |
+
"\u96b6\u5c5e": 1887,
|
| 1886 |
+
"\u51b3\u7b56\u95ee\u9898": 1888,
|
| 1887 |
+
"\u800c\u4e14": 1889,
|
| 1888 |
+
"\u5f55\u7528": 1890,
|
| 1889 |
+
"\u5b66\u672f\u8bba\u6587": 1891,
|
| 1890 |
+
"\u9762\u5411": 1892,
|
| 1891 |
+
"\u5bfc\u5e08": 1893,
|
| 1892 |
+
"\u64cd\u4f5c\u6280\u80fd": 1894,
|
| 1893 |
+
"\u524d\u63d0": 1895,
|
| 1894 |
+
"\u81ea\u7136\u79d1\u5b66": 1896,
|
| 1895 |
+
"\u5de5\u7a0b\u6280\u672f": 1897,
|
| 1896 |
+
"\u5173\u952e\u95ee\u9898": 1898,
|
| 1897 |
+
"\u7535\u70ed": 1899,
|
| 1898 |
+
"\u55b7\u5634": 1900,
|
| 1899 |
+
"\u6781": 1901,
|
| 1900 |
+
"\u56de\u6d41": 1902,
|
| 1901 |
+
"\u5782\u76f4": 1903,
|
| 1902 |
+
"\u96fe\u5316": 1904,
|
| 1903 |
+
"\u89d2": 1905,
|
| 1904 |
+
"\u9897\u7c92": 1906,
|
| 1905 |
+
"3D": 1907,
|
| 1906 |
+
"\u771f\u5b9e": 1908,
|
| 1907 |
+
"\u81c2": 1909,
|
| 1908 |
+
"\u8fde\u6746": 1910,
|
| 1909 |
+
"\u5f39\u6027": 1911,
|
| 1910 |
+
"\u5feb\u901f": 1912,
|
| 1911 |
+
"\u524d\u9988": 1913,
|
| 1912 |
+
"\u8bef\u5dee": 1914,
|
| 1913 |
+
"\u89c2\u6d4b\u5668": 1915,
|
| 1914 |
+
"\u89d2\u901f\u5ea6": 1916,
|
| 1915 |
+
"\u8de8\u5883": 1917,
|
| 1916 |
+
"\u5e76\u8d2d": 1918,
|
| 1917 |
+
"\u7ba1\u58f3": 1919,
|
| 1918 |
+
"\u6362\u70ed\u5668": 1920,
|
| 1919 |
+
"\u5931\u6548": 1921,
|
| 1920 |
+
"\u5f15\u53d1": 1922,
|
| 1921 |
+
"\u673a\u68b0\u5de5\u7a0b": 1923,
|
| 1922 |
+
"\u6761\u7406\u5206\u660e": 1924,
|
| 1923 |
+
"\u5b66\u672f\u89c2\u70b9": 1925,
|
| 1924 |
+
"\u5206\u4e3a": 1926,
|
| 1925 |
+
"\u592a": 1927,
|
| 1926 |
+
"\u51b7\u85cf\u8f66": 1928,
|
| 1927 |
+
"\u836f\u54c1": 1929,
|
| 1928 |
+
"\u8fdc\u7a0b": 1930,
|
| 1929 |
+
"\u8f66\u8f7d": 1931,
|
| 1930 |
+
"\u4e3b\u63a7": 1932,
|
| 1931 |
+
"CPU": 1933,
|
| 1932 |
+
"\u5b9a\u4f4d\u7cfb\u7edf": 1934,
|
| 1933 |
+
"\u670d\u52a1\u5668": 1935,
|
| 1934 |
+
"\u7ec8\u7aef": 1936,
|
| 1935 |
+
"\u8bf4": 1937,
|
| 1936 |
+
"\u76f4\u9a71": 1938,
|
| 1937 |
+
"\u50a8\u6cb9\u7f50": 1939,
|
| 1938 |
+
"\u8584\u819c": 1940,
|
| 1939 |
+
"\u87ba\u7eb9": 1941,
|
| 1940 |
+
"\u5fae\u52a8": 1942,
|
| 1941 |
+
"\u6a2a\u5411": 1943,
|
| 1942 |
+
"\u87ba\u6813": 1944,
|
| 1943 |
+
"\u63a5\u89e6\u9762": 1945,
|
| 1944 |
+
"\u6ed1\u79fb": 1946,
|
| 1945 |
+
"\u9884\u7d27": 1947,
|
| 1946 |
+
"\u529b\u77e9": 1948,
|
| 1947 |
+
"\u673a\u55b7\u6746": 1949,
|
| 1948 |
+
"\u55b7\u6746": 1950,
|
| 1949 |
+
"\u4e8c": 1951,
|
| 1950 |
+
"\u5c1a\u672a": 1952,
|
| 1951 |
+
"\u4e0d\u80fd": 1953,
|
| 1952 |
+
"\u5149\u4f0f": 1954,
|
| 1953 |
+
"\u8bbe\u8ba1\u65b9\u6848": 1955,
|
| 1954 |
+
"MMC": 1956,
|
| 1955 |
+
"\u4e2d\u5c0f\u4f01\u4e1a": 1957,
|
| 1956 |
+
"\u4e00\u7c7b": 1958,
|
| 1957 |
+
"\u8fd9\u662f": 1959,
|
| 1958 |
+
"\u8fd9\u4e2a": 1960,
|
| 1959 |
+
"\u7275\u5f15": 1961,
|
| 1960 |
+
"\u4f20\u52a8\u7cfb\u7edf": 1962,
|
| 1961 |
+
"\u53d1\u6ce1": 1963,
|
| 1962 |
+
"\u8f66\u95f4": 1964,
|
| 1963 |
+
"\u82f1\u6587": 1965,
|
| 1964 |
+
"\u5e7f\u4e49": 1966,
|
| 1965 |
+
"\u7ebf\u6027\u5316": 1967,
|
| 1966 |
+
"\u901f\u7387": 1968,
|
| 1967 |
+
"\u9c81\u68d2": 1969,
|
| 1968 |
+
"\u221e": 1970,
|
| 1969 |
+
"\u8fd9\u6837": 1971,
|
| 1970 |
+
"\u6881\u6f31\u6e9f": 1972,
|
| 1971 |
+
"\u5112\u5b66": 1973,
|
| 1972 |
+
"\u8f6c\u53d8": 1974,
|
| 1973 |
+
"\u54f2\u5b66": 1975,
|
| 1974 |
+
"\u8bb2": 1976,
|
| 1975 |
+
"\u529b\u5ea6": 1977,
|
| 1976 |
+
"\u76d1\u7763": 1978,
|
| 1977 |
+
"\u53d7\u9650": 1979,
|
| 1978 |
+
"\u4eba\u673a\u4ea4\u4e92": 1980,
|
| 1979 |
+
"\u4ea4\u4e92": 1981,
|
| 1980 |
+
"\u9ad8\u901f": 1982,
|
| 1981 |
+
"\u60ac\u67b6": 1983,
|
| 1982 |
+
"\u672a\u80fd": 1984,
|
| 1983 |
+
"\u70ae\u957f": 1985,
|
| 1984 |
+
"\u7784\u51c6\u955c": 1986,
|
| 1985 |
+
"\u4e13\u5bb6\u7cfb\u7edf": 1987,
|
| 1986 |
+
"\u6027\u80fd\u6307\u6807": 1988,
|
| 1987 |
+
"\u90e8\u4ef6": 1989,
|
| 1988 |
+
"\u7ef4\u4fee": 1990,
|
| 1989 |
+
"\u7cbe\u5bc6": 1991,
|
| 1990 |
+
"\u5355\u8f74": 1992,
|
| 1991 |
+
"\u5168\u5c40": 1993,
|
| 1992 |
+
"\u9012\u5f52": 1994,
|
| 1993 |
+
"\u55b7": 1995,
|
| 1994 |
+
"\u6bd4\u5982": 1996,
|
| 1995 |
+
"\u65e0\u4eba\u673a": 1997,
|
| 1996 |
+
"\u8054\u7cfb\u5b9e\u9645": 1998,
|
| 1997 |
+
"\u7cbe\u76ca": 1999,
|
| 1998 |
+
"\u8ba2\u5355": 2000,
|
| 1999 |
+
"\u9762\u4e34": 2001,
|
| 2000 |
+
"\u751f\u6d3b": 2002,
|
| 2001 |
+
"PLC": 2003,
|
| 2002 |
+
"\u5185\u6a21": 2004,
|
| 2003 |
+
"\u8fdb": 2005,
|
| 2004 |
+
"\u9ad8\u7cbe\u5ea6": 2006,
|
| 2005 |
+
"\u6570\u63a7\u673a\u5e8a": 2007,
|
| 2006 |
+
"\u63a8\u529b": 2008,
|
| 2007 |
+
"\u8868\u8fbe\u5f0f": 2009,
|
| 2008 |
+
"\u57fa\u91d1": 2010,
|
| 2009 |
+
"\u5de8\u5927": 2011,
|
| 2010 |
+
"\u4e89\u8bae": 2012,
|
| 2011 |
+
"\u5b9a\u4ef7": 2013,
|
| 2012 |
+
"\u56de\u987e": 2014,
|
| 2013 |
+
"\u671f\u6743": 2015,
|
| 2014 |
+
"\u914d\u9001": 2016,
|
| 2015 |
+
"\u6539\u6027": 2017,
|
| 2016 |
+
"\u7269\u8d28": 2018,
|
| 2017 |
+
"ACM": 2019,
|
| 2018 |
+
"HPVC": 2020,
|
| 2019 |
+
"TPE": 2021,
|
| 2020 |
+
"\u8010\u70ed": 2022,
|
| 2021 |
+
"\u6cb9": 2023,
|
| 2022 |
+
"\u56de\u5f39": 2024,
|
| 2023 |
+
"\u7efc\u5408\u6027": 2025,
|
| 2024 |
+
"\u6709\u6240": 2026,
|
| 2025 |
+
"\u6750\u6599\u79d1\u5b66": 2027,
|
| 2026 |
+
"\u79d1\u5b66\u6027": 2028,
|
| 2027 |
+
"\u6309\u7167": 2029,
|
| 2028 |
+
"CAD": 2030,
|
| 2029 |
+
"CAM": 2031,
|
| 2030 |
+
"\u6563\u70ed\u5668": 2032,
|
| 2031 |
+
"\u6e29\u5ea6\u573a": 2033,
|
| 2032 |
+
"\u6c14\u6d41": 2034,
|
| 2033 |
+
"\u6708": 2035,
|
| 2034 |
+
"\u65e5": 2036,
|
| 2035 |
+
"\u8f6f\u4ef6\u7cfb\u7edf": 2037,
|
| 2036 |
+
"\u8102\u80aa\u9187": 2038,
|
| 2037 |
+
"\u805a\u6c27\u4e59\u70ef\u919a": 2039,
|
| 2038 |
+
"=": 2040,
|
| 2039 |
+
"OFDM": 2041,
|
| 2040 |
+
"\u4e00\u5e26": 2042,
|
| 2041 |
+
"\u4e00\u8def": 2043,
|
| 2042 |
+
"\u6cbf\u7ebf": 2044,
|
| 2043 |
+
"\u6d6e\u52a8": 2045,
|
| 2044 |
+
"\u6cb9\u5c01": 2046,
|
| 2045 |
+
"\u96f6\u4ef6": 2047,
|
| 2046 |
+
"O": 2048,
|
| 2047 |
+
"\u5bc6\u5c01\u5708": 2049,
|
| 2048 |
+
"\u6ed1\u52a8": 2050,
|
| 2049 |
+
"EBE": 2051,
|
| 2050 |
+
"\u9884\u5904\u7406": 2052,
|
| 2051 |
+
"\u8fed\u4ee3": 2053,
|
| 2052 |
+
"\u504f\u5fc3": 2054,
|
| 2053 |
+
"\u52a9\u884c": 2055,
|
| 2054 |
+
"\u6f14\u53d8": 2056,
|
| 2055 |
+
"\uff1f": 2057,
|
| 2056 |
+
"\u5f62\u6001": 2058,
|
| 2057 |
+
"\u5e2e\u52a9": 2059,
|
| 2058 |
+
"\uff0e": 2060,
|
| 2059 |
+
"\u58f0\u5b66": 2061,
|
| 2060 |
+
"\u91cd\u5efa": 2062,
|
| 2061 |
+
"MKR": 2063,
|
| 2062 |
+
"MTR": 2064,
|
| 2063 |
+
"\u5947\u5f02": 2065,
|
| 2064 |
+
"\u9762": 2066,
|
| 2065 |
+
"\u4f8b\u5b50": 2067,
|
| 2066 |
+
"\u73b0\u4ee3": 2068,
|
| 2067 |
+
"\u7269\u6d41": 2069,
|
| 2068 |
+
"\u8fd0\u8425": 2070,
|
| 2069 |
+
"\u7269\u7406": 2071,
|
| 2070 |
+
"\u4f9b\u7ed9": 2072,
|
| 2071 |
+
"\t": 2073,
|
| 2072 |
+
"\u5206\u6790\u6a21\u578b": 2074,
|
| 2073 |
+
"\u7565\u663e": 2075,
|
| 2074 |
+
"\u77f3\u58a8": 2076,
|
| 2075 |
+
"\u70ef": 2077,
|
| 2076 |
+
"\u8d1f\u6781": 2078,
|
| 2077 |
+
"\u591a\u5c42": 2079,
|
| 2078 |
+
"\u73b0\u91d1": 2080,
|
| 2079 |
+
"\u6df1": 2081,
|
| 2080 |
+
"\u8f67\u5236": 2082,
|
| 2081 |
+
"\u79bb\u5fc3": 2083,
|
| 2082 |
+
"\u538b\u7f29\u673a": 2084,
|
| 2083 |
+
"\u6709\u8bef": 2085,
|
| 2084 |
+
"\u4e3a\u4ec0\u4e48": 2086,
|
| 2085 |
+
"\u8bbe\u5b9a": 2087,
|
| 2086 |
+
"A": 2088,
|
| 2087 |
+
"\u68d2\u6750": 2089,
|
| 2088 |
+
"\u77eb\u76f4": 2090,
|
| 2089 |
+
"\u5f2f\u77e9": 2091,
|
| 2090 |
+
"\u66f2\u7387": 2092,
|
| 2091 |
+
"\u8f8a": 2093,
|
| 2092 |
+
"\u6700\u4f18\u63a7\u5236": 2094,
|
| 2093 |
+
"\u5ea7\u6905": 2095,
|
| 2094 |
+
"\u5f2f\u7ba1": 2096,
|
| 2095 |
+
"\u4ee5\u5185": 2097,
|
| 2096 |
+
"\u663e\u7136": 2098,
|
| 2097 |
+
"\u57f9\u80b2": 2099,
|
| 2098 |
+
"\u5706\u5468": 2100,
|
| 2099 |
+
"\u5206\u79bb\u673a": 2101,
|
| 2100 |
+
"\u789f\u7247": 2102,
|
| 2101 |
+
"\u76d2": 2103,
|
| 2102 |
+
"\u8868\u793a": 2104,
|
| 2103 |
+
"BM": 2105,
|
| 2104 |
+
"\u5168": 2106,
|
| 2105 |
+
"\u5e38\u6001": 2107,
|
| 2106 |
+
"\u8f6c\u5411": 2108,
|
| 2107 |
+
"BIM": 2109,
|
| 2108 |
+
"\u4e13\u4e1a\u5b66\u4f4d": 2110,
|
| 2109 |
+
"Zr50Cu40Al10": 2111,
|
| 2110 |
+
"\u526a\u5207": 2112,
|
| 2111 |
+
"\u6790\u51fa": 2113,
|
| 2112 |
+
"\u6837\u54c1": 2114,
|
| 2113 |
+
"\u4fdd\u6301": 2115,
|
| 2114 |
+
"\u7eb9": 2116,
|
| 2115 |
+
"\u6676\u4f53": 2117,
|
| 2116 |
+
"\u03b1": 2118,
|
| 2117 |
+
"GO": 2119,
|
| 2118 |
+
"PVDF": 2120,
|
| 2119 |
+
"\u5fae": 2121,
|
| 2120 |
+
"\u6ee4\u819c": 2122,
|
| 2121 |
+
"\u6297\u6c61\u67d3": 2123,
|
| 2122 |
+
"\u5ba2\u89c2": 2124,
|
| 2123 |
+
"\u6e05\u6d01": 2125,
|
| 2124 |
+
"\u540c\u6b65\u673a": 2126,
|
| 2125 |
+
"\u5bf9\u63a5": 2127,
|
| 2126 |
+
"\u5e76\u672a": 2128,
|
| 2127 |
+
"\u78c1\u969c": 2129,
|
| 2128 |
+
"\u5e94\u529b\u573a": 2130,
|
| 2129 |
+
"\u666e\u901a": 2131,
|
| 2130 |
+
"\u7247\u5f0f": 2132,
|
| 2131 |
+
"\u81ea\u8eab": 2133,
|
| 2132 |
+
"Cu": 2134,
|
| 2133 |
+
"\u6210\u5f62": 2135,
|
| 2134 |
+
"\u78e8\u524a": 2136,
|
| 2135 |
+
"\u667a\u80fd\u5316": 2137,
|
| 2136 |
+
"\u4f5c\u52a8\u5668": 2138,
|
| 2137 |
+
"\u5149\u6805": 2139,
|
| 2138 |
+
"\u77ac\u6001": 2140,
|
| 2139 |
+
"\u504f\u597d": 2141,
|
| 2140 |
+
"\u5e8f": 2142,
|
| 2141 |
+
"\u556e\u5408": 2143,
|
| 2142 |
+
"\u8f6e\u9f7f": 2144,
|
| 2143 |
+
"\u9644\u8fd1": 2145,
|
| 2144 |
+
"\u94dd\u57fa": 2146,
|
| 2145 |
+
"\u78b3\u5316\u7845": 2147,
|
| 2146 |
+
"\u6700\u65b0": 2148,
|
| 2147 |
+
"\u8230\u8239": 2149,
|
| 2148 |
+
"\u5148\u8fdb\u6027": 2150,
|
| 2149 |
+
"\u6c34\u6ce5": 2151,
|
| 2150 |
+
"\u5e26\u6709": 2152,
|
| 2151 |
+
"\u5eb7\u590d": 2153,
|
| 2152 |
+
"\u8ba1\u7b97\u673a": 2154,
|
| 2153 |
+
"\u4e94\u8f74": 2155,
|
| 2154 |
+
"\u4e49\u9f7f": 2156,
|
| 2155 |
+
"\u53ef\u89c6\u5316": 2157,
|
| 2156 |
+
"\u4f9d\u6cd5\u6cbb\u56fd": 2158,
|
| 2157 |
+
"\u7eb5\u5411": 2159,
|
| 2158 |
+
"\u5728\u4e8e": 2160,
|
| 2159 |
+
"\u7a00\u758f": 2161,
|
| 2160 |
+
"\u4e0d\u9519": 2162,
|
| 2161 |
+
"\u7fa4\u4f17\u89c2": 2163,
|
| 2162 |
+
"\u8df5\u884c": 2164,
|
| 2163 |
+
"\u9014\u5f84": 2165,
|
| 2164 |
+
"\u77ff\u7269": 2166,
|
| 2165 |
+
"\u65cb\u6d41": 2167,
|
| 2166 |
+
"\u4f18\u826f": 2168,
|
| 2167 |
+
"\u4eba\u4eec": 2169,
|
| 2168 |
+
"\u6743\u5229": 2170,
|
| 2169 |
+
"\u5185\u6db5": 2171,
|
| 2170 |
+
"\u7a7a\u5316": 2172,
|
| 2171 |
+
"\u649e\u51fb": 2173,
|
| 2172 |
+
"\u78b0\u649e": 2174,
|
| 2173 |
+
"15.0": 2175,
|
| 2174 |
+
"\u5316\u5b66\u53cd\u5e94": 2176,
|
| 2175 |
+
"\u5bfb\u627e": 2177,
|
| 2176 |
+
"\u51e0\u4f55": 2178,
|
| 2177 |
+
"\u673a\u5e8a": 2179,
|
| 2178 |
+
"\u5e95": 2180,
|
| 2179 |
+
"\u4e0d\u5f53": 2181,
|
| 2180 |
+
"\u8fd1": 2182,
|
| 2181 |
+
"\u65ad\u5c42": 2183,
|
| 2182 |
+
"\u6709\u9650": 2184,
|
| 2183 |
+
"\u6a21\u62df\u8ba1\u7b97": 2185,
|
| 2184 |
+
"\u79bb\u5b50": 2186,
|
| 2185 |
+
"\u5206\u79bb": 2187,
|
| 2186 |
+
"\u805a\u919a": 2188,
|
| 2187 |
+
"\u8fde\u7eed": 2189,
|
| 2188 |
+
"\u78b3\u7ea4\u7ef4": 2190,
|
| 2189 |
+
"\u51c6\u5219": 2191,
|
| 2190 |
+
"\u4f20\u52a8\u8f74": 2192,
|
| 2191 |
+
"\u52a0\u70ed": 2193,
|
| 2192 |
+
"\u4e2a\u4eba": 2194,
|
| 2193 |
+
"\u4fe1\u606f\u7f51\u7edc": 2195,
|
| 2194 |
+
"\u51b3\u7b56\u8005": 2196,
|
| 2195 |
+
"\u4f9b\u5e94\u5546": 2197,
|
| 2196 |
+
"\u4e3a\u4f8b": 2198,
|
| 2197 |
+
"\u5730\u65b9": 2199,
|
| 2198 |
+
"\u8bae\u8bba": 2200,
|
| 2199 |
+
"\u6f5c\u6cb9": 2201,
|
| 2200 |
+
"\u7ec4": 2202,
|
| 2201 |
+
"\u53d8\u7535\u7ad9": 2203,
|
| 2202 |
+
"\u4e94\u9632": 2204,
|
| 2203 |
+
"\u8870\u9000": 2205,
|
| 2204 |
+
"\u5bf9\u6bd4\u5ea6": 2206,
|
| 2205 |
+
"B1": 2207,
|
| 2206 |
+
"\u53cd\u6f14": 2208,
|
| 2207 |
+
"\u5e9f\u65e7": 2209,
|
| 2208 |
+
"\u5c1a": 2210,
|
| 2209 |
+
"\u4f7f\u7528\u8005": 2211,
|
| 2210 |
+
"\u91d1\u878d": 2212,
|
| 2211 |
+
"\u58f0\u97f3": 2213,
|
| 2212 |
+
"\u4efb\u52a1": 2214,
|
| 2213 |
+
"\u5355\u76ee": 2215,
|
| 2214 |
+
"\u6444\u50cf\u673a": 2216,
|
| 2215 |
+
"\u4ee3\u4ef7": 2217,
|
| 2216 |
+
"\u805a\u5408": 2218,
|
| 2217 |
+
"\u4e0d\u52a8\u4ea7": 2219,
|
| 2218 |
+
"\u9884\u544a": 2220,
|
| 2219 |
+
"\u767b\u8bb0": 2221,
|
| 2220 |
+
"Cf": 2222,
|
| 2221 |
+
"\u5c42\u72b6": 2223,
|
| 2222 |
+
"\u8d85\u58f0": 2224,
|
| 2223 |
+
"\u9884\u5236": 2225,
|
| 2224 |
+
"TC4": 2226,
|
| 2225 |
+
"\u710a\u7f1d": 2227,
|
| 2226 |
+
"\u7194\u6c60": 2228,
|
| 2227 |
+
"\u6307\u8282": 2229,
|
| 2228 |
+
"\u9ad8\u65af": 2230,
|
| 2229 |
+
"\u6dec\u706b": 2231,
|
| 2230 |
+
"\u5438\u6ce2\u5242": 2232,
|
| 2231 |
+
"\u5438\u6ce2": 2233,
|
| 2232 |
+
"\u672c\u8eab": 2234,
|
| 2233 |
+
"\u5e94\u5f53": 2235,
|
| 2234 |
+
"Ni": 2236,
|
| 2235 |
+
"\u9884\u5e94\u529b": 2237,
|
| 2236 |
+
"\u5bc6\u5b9e": 2238,
|
| 2237 |
+
"\u518d\u751f": 2239,
|
| 2238 |
+
"\u6df7\u51dd\u571f": 2240,
|
| 2239 |
+
"\u9aa8\u6599": 2241,
|
| 2240 |
+
"\u6881": 2242,
|
| 2241 |
+
"\u793e\u533a": 2243,
|
| 2242 |
+
"\u5408\u4f5c": 2244,
|
| 2243 |
+
"\u5ca9\u77f3": 2245,
|
| 2244 |
+
"\u6309\u952e": 2246,
|
| 2245 |
+
"\u4e8c\u7ea7": 2247,
|
| 2246 |
+
"\u6eda\u5200": 2248,
|
| 2247 |
+
"\u6da1\u6d41": 2249,
|
| 2248 |
+
"\u6cb9\u7eb8": 2250,
|
| 2249 |
+
"\u8f6e\u6bc2": 2251,
|
| 2250 |
+
"\u8230\u8f7d\u673a": 2252,
|
| 2251 |
+
"\u7275\u5f15\u8f66": 2253,
|
| 2252 |
+
"\u5236\u52a8": 2254,
|
| 2253 |
+
"\u4eba\u5de5\u667a\u80fd": 2255,
|
| 2254 |
+
"\u4eba\u8138\u8bc6\u522b": 2256,
|
| 2255 |
+
"\u4e73\u80f6\u624b\u5957": 2257,
|
| 2256 |
+
"\u7ecf\u8425\u6743": 2258,
|
| 2257 |
+
"\u571f\u5730": 2259,
|
| 2258 |
+
"\u627f\u5305\u4eba": 2260,
|
| 2259 |
+
"\u9000\u706b": 2261,
|
| 2260 |
+
"\u65f6\u95f4\u5e38\u6570": 2262,
|
| 2261 |
+
"\u70ed\u7535\u5076": 2263,
|
| 2262 |
+
"\u5206\u62e3": 2264,
|
| 2263 |
+
"\u6c14\u6ce1": 2265,
|
| 2264 |
+
"\u7384\u6b66\u5ca9": 2266,
|
| 2265 |
+
"\u884c\u661f": 2267,
|
| 2266 |
+
"\u672c\u7ae0": 2268,
|
| 2267 |
+
"\u534f\u540c\u6548\u5e94": 2269,
|
| 2268 |
+
"\u6676\u5706": 2270,
|
| 2269 |
+
"\u4e8c\u6781\u7ba1": 2271,
|
| 2270 |
+
"\u80a1\u6743": 2272,
|
| 2271 |
+
"\u6587\u5316\u9057\u4ea7": 2273,
|
| 2272 |
+
"\u4e3b\u9898": 2274,
|
| 2273 |
+
"\u76f8\u4f3c": 2275,
|
| 2274 |
+
"\u77e5\u8bc6\u4ea7\u6743": 2276,
|
| 2275 |
+
"\u9875": 2277,
|
| 2276 |
+
"\u4f5c\u54c1": 2278,
|
| 2277 |
+
"\u671f\u9650": 2279,
|
| 2278 |
+
"\u8f6e": 2280,
|
| 2279 |
+
"\u78c1\u529b": 2281,
|
| 2280 |
+
"\u6bd4\u7387": 2282,
|
| 2281 |
+
"\u6e20\u9053": 2283,
|
| 2282 |
+
"\u7535\u4e3b\u8f74": 2284,
|
| 2283 |
+
"\u673a\u68b0\u624b": 2285,
|
| 2284 |
+
"\u94a2\u7ba1": 2286,
|
| 2285 |
+
"\u725b\u817f": 2287,
|
| 2286 |
+
"\u6ce2\u6d6a": 2288,
|
| 2287 |
+
"\u6ed1\u7fd4": 2289,
|
| 2288 |
+
"\u6c34\u9762": 2290,
|
| 2289 |
+
"\u6d6e\u4f53": 2291,
|
| 2290 |
+
"\u4eba\u58eb": 2292,
|
| 2291 |
+
"\u534f\u4f5c": 2293,
|
| 2292 |
+
"\u4eba\u673a": 2294,
|
| 2293 |
+
"\u6c89\u79ef": 2295,
|
| 2294 |
+
"\u62a5\u8b66": 2296,
|
| 2295 |
+
"\u5b6a\u6676": 2297,
|
| 2296 |
+
"\u6cb9\u70df": 2298,
|
| 2297 |
+
"\u901a\u8baf": 2299,
|
| 2298 |
+
"LED": 2300,
|
| 2299 |
+
"LLC": 2301,
|
| 2300 |
+
"\u73b0\u91d1\u6d41": 2302,
|
| 2301 |
+
"\u73b0\u91d1\u6d41\u91cf": 2303,
|
| 2302 |
+
"\u9884\u7b97": 2304,
|
| 2303 |
+
"\u670d\u52a1": 2305,
|
| 2304 |
+
"\u6c34\u4e0b": 2306,
|
| 2305 |
+
"\u91cd\u5fc3": 2307,
|
| 2306 |
+
"\u87ba\u65cb\u6868": 2308,
|
| 2307 |
+
"\u60ef\u6027": 2309,
|
| 2308 |
+
"\u9006\u5224": 2310,
|
| 2309 |
+
"\u5c04\u9891": 2311,
|
| 2310 |
+
"\u91cd\u4f20": 2312,
|
| 2311 |
+
"\u6bcd\u6750": 2313,
|
| 2312 |
+
"\u9886\u5934": 2314,
|
| 2313 |
+
"\u9e1f": 2315,
|
| 2314 |
+
"\u9a6c\u6c0f\u4f53": 2316,
|
| 2315 |
+
"HAZ": 2317,
|
| 2316 |
+
"\u970d\u59c6\u65af": 2318,
|
| 2317 |
+
"\u7ecf\u9a8c\u8bba": 2319,
|
| 2318 |
+
"\u4f01\u4e1a\u5bb6": 2320,
|
| 2319 |
+
"\u5361\u5c14\u66fc\u6ee4\u6ce2": 2321,
|
| 2320 |
+
"\u4e09\u4e09\u5236": 2322,
|
| 2321 |
+
"\u653f\u6743": 2323,
|
| 2322 |
+
"\u653f\u6743\u5efa\u8bbe": 2324,
|
| 2323 |
+
"\u78b3\u5316\u787c": 2325,
|
| 2324 |
+
"\u884c\u6ce2": 2326,
|
| 2325 |
+
"\u819d\u5173\u8282": 2327,
|
| 2326 |
+
"\u4e91": 2328,
|
| 2327 |
+
"SCADA": 2329,
|
| 2328 |
+
"CPs": 2330,
|
| 2329 |
+
"\u6728\u6750": 2331,
|
| 2330 |
+
"\u5c0f\u65f6": 2332,
|
| 2331 |
+
"Ca": 2333,
|
| 2332 |
+
"\u71c3\u673a": 2334,
|
| 2333 |
+
"\u9762\u95f4": 2335,
|
| 2334 |
+
"\u7acb\u4f53\u4ed3\u5e93": 2336,
|
| 2335 |
+
"\u51fa\u5e93": 2337,
|
| 2336 |
+
"\u7eb5\u6881": 2338,
|
| 2337 |
+
"\u7fc5\u7247\u7ba1": 2339,
|
| 2338 |
+
"\u6a2a\u6446": 2340,
|
| 2339 |
+
"\u626b\u67e5": 2341,
|
| 2340 |
+
"\u7194\u8986": 2342,
|
| 2341 |
+
"\u533b\u836f\u516c\u53f8": 2343,
|
| 2342 |
+
"\u62d3\u5c55": 2344,
|
| 2343 |
+
"\u56fa\u4f53\u5e9f\u7269": 2345,
|
| 2344 |
+
"\u7a7a\u8c03": 2346,
|
| 2345 |
+
"\u6cb9\u6e29": 2347,
|
| 2346 |
+
"\u5de5\u65f6": 2348,
|
| 2347 |
+
"\u70ad\u819c": 2349,
|
| 2348 |
+
"\u5806\u77f3": 2350,
|
| 2349 |
+
"\u52a0\u5f3a\u7b4b": 2351,
|
| 2350 |
+
"\u6cb9\u7bb1": 2352,
|
| 2351 |
+
"\u8003\u6838": 2353,
|
| 2352 |
+
"\u524a\u5cf0": 2354,
|
| 2353 |
+
"\u586b\u8c37": 2355,
|
| 2354 |
+
"\u5438\u5f15": 2356,
|
| 2355 |
+
"\u5e72\u70ed\u5ca9": 2357,
|
| 2356 |
+
"\u82b1\u5c97\u5ca9": 2358,
|
| 2357 |
+
"\u4e09\u8f74": 2359,
|
| 2358 |
+
"\u03b5": 2360,
|
| 2359 |
+
"\u5750\u57ab": 2361,
|
| 2360 |
+
"\u6c14\u6d6e": 2362,
|
| 2361 |
+
"\u4e92\u8865": 2363,
|
| 2362 |
+
"\u4f9d\u5b58": 2364,
|
| 2363 |
+
"<pad>": 0,
|
| 2364 |
+
"<start>": 1,
|
| 2365 |
+
"<end>": 2,
|
| 2366 |
+
"<unk>": 3
|
| 2367 |
+
}
|
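Note: the word map just added ends with the four special tokens <pad>=0, <start>=1, <end>=2 and <unk>=3. As a minimal sketch of how a vocabulary file of this shape is typically consumed (an assumption about usage, not code taken from this repo's own scripts), a pre-segmented sentence can be mapped to a fixed-length index sequence like so:

import json

# Load the vocabulary (token -> index). Path taken from this commit's layout.
with open('data/WORDMAP.json', encoding='utf-8') as f:
    word_map = json.load(f)

def encode(tokens, max_len=50):
    # <start> tokens... <end>, unknown words fall back to <unk>, then pad to a
    # fixed length. max_len and the padding scheme are illustrative assumptions,
    # not values read from this repo's config.
    ids = [word_map['<start>']]
    ids += [word_map.get(t, word_map['<unk>']) for t in tokens]
    ids.append(word_map['<end>'])
    ids += [word_map['<pad>']] * (max_len - len(ids))
    return ids[:max_len]

# Usage: tokens as produced by a Chinese segmenter such as jieba (cf. jieba_cut.py).
print(encode(['故障诊断', '方向'])[:6])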
data/ai_challenger_fsauor2018_testa_20180816.zip
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a768be12cb62b4f70d99816f2c023237716129fc93785cde67ea1c777537c299
+size 6923945
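The three added lines are a Git LFS pointer (spec v1) rather than the archive itself: oid is the sha256 of the stored object and size its byte count, matching the LFS filters declared in .gitattributes. A minimal, hypothetical reader for such pointer files (not part of this repo):

def parse_lfs_pointer(path):
    # Parse a "key value"-per-line Git LFS pointer into a dict.
    fields = {}
    with open(path, encoding='utf-8') as f:
        for line in f:
            if line.strip():
                key, _, value = line.strip().partition(' ')
                fields[key] = value
    fields['size'] = int(fields['size'])
    return fields

# e.g. parse_lfs_pointer('data/ai_challenger_fsauor2018_testa_20180816.zip')
# -> {'version': 'https://git-lfs.github.com/spec/v1',
#     'oid': 'sha256:a768be12...', 'size': 6923945}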
data/ai_challenger_fsauor2018_trainingset_20180816.zip
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:715f342e7be701d133abb21219fa1c2ef248de730aed14c5e1fdf5318dfef5f1
+size 49569368
data/ai_challenger_fsauor2018_validationset_20180816.zip
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b65b4cee8749c1608e782712aac13896a5a81d023451832220ac64a01bd2abd8
+size 7115966
data/ai_challenger_sentiment_analysis_testa_20180816/README.txt
ADDED
@@ -0,0 +1,2 @@
+sentiment_analysis_testa.csv is the test set A data file, 15,000 review entries in total
+protocol.txt is the dataset download agreement
data/ai_challenger_sentiment_analysis_testa_20180816/protocol.txt
ADDED
@@ -0,0 +1,12 @@
+Dataset Download Agreement
+
+You (hereinafter the "Researcher") are requesting that the organizer grant you the right to access, download, and use the dataset (hereinafter the "Dataset"), referred to below as the "License". As a condition of receiving the License, you agree to the following terms:
+
+1. The Researcher agrees to use the Dataset solely for non-commercial scientific research or classroom teaching, and shall not use the Dataset for any commercial purpose;
+2. We do not hold the intellectual property rights to the images, audio, text, and other content used in the Dataset, and make no warranty regarding such content, including but not limited to that it does not infringe third-party intellectual property rights or that it may be used for any particular purpose;
+3. We accept no liability for loss or harm of any kind caused by use of the Dataset, and bear no responsibility for any legal consequences arising from use of the competition data;
+4. Any legal liability connected with use of the Dataset rests with the Researcher; if use of the Dataset by the Researcher or the Researcher's employees, agents, or affiliates causes us reputational or economic damage, the Researcher shall be liable for compensation;
+5. The Researcher may authorize assistants, colleagues, or other collaborators to access and use the Dataset, provided the Researcher ensures they have carefully read and agreed to be bound by this agreement;
+6. If the Researcher is employed by a for-profit commercial entity, the Researcher shall ensure the Dataset is used only for non-commercial purposes and that the employer is likewise bound by this agreement; the Researcher confirms that full authorization from the employer was obtained before entering into this agreement.
+7. We may cancel or revoke the Researcher's License to use the Dataset at any time, and may require the Researcher to delete any downloaded copy of the Dataset;
+8. Any dispute arising from or relating to this agreement shall be submitted to the China International Economic and Trade Arbitration Commission for arbitration under its rules in effect at the time of application, with the law of the People's Republic of China applying; the language of arbitration shall be Chinese.
data/ai_challenger_sentiment_analysis_testa_20180816/sentiment_analysis_testa.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:807246836a9dc6974860bdb18a33be07cb850d2ed5e28c87170514fe16df33d2
+size 15608586
data/ai_challenger_sentiment_analysis_testa_20180816/sentiment_analysis_testa2.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f853d7296a74269ec3da9a2bf3ed1493cd422d40bd799dbb41ec9fd09543b1f1
+size 200185
data/ai_challenger_sentiment_analysis_trainingset_20180816/README.txt
ADDED
@@ -0,0 +1,3 @@
+sentiment_analysis_trainingset.csv is the training set data file, containing 105,000 review entries in total
+sentiment_analysis_trainingset_annotations.docx is the data annotation specification file
+protocol.txt is the dataset download agreement
data/ai_challenger_sentiment_analysis_trainingset_20180816/protocol.txt
ADDED
@@ -0,0 +1,12 @@
+Dataset Download Agreement
+
+You (hereinafter the "Researcher") are requesting that the organizer grant you the right to access, download, and use the dataset (hereinafter the "Dataset") (such right hereinafter the "License"). As a condition of obtaining the License, you agree to the following terms:
+
+1. The Researcher agrees to use the Dataset solely for non-commercial scientific research or classroom teaching, and shall not use the Dataset for any commercial purpose;
+2. We do not own the intellectual property rights to the images, audio, text, or other content in the Dataset, and make no warranties about such content, including but not limited to non-infringement of third-party intellectual property rights or fitness for any particular purpose;
+3. We assume no liability for loss or harm of any kind caused by use of the Dataset, and accept no responsibility for any legal consequences arising from use of the competition data;
+4. All legal liability connected with use of the Dataset rests with the Researcher; if use of the Dataset by the Researcher or the Researcher's employees, agents, or affiliates causes us reputational or economic damage, the Researcher shall bear liability for compensation;
+5. The Researcher may authorize assistants, colleagues, or other collaborators to access and use the Dataset, but shall ensure that such persons have carefully read and agreed to be bound by this Agreement;
+6. If the Researcher is employed by a for-profit commercial entity, the Researcher shall ensure that the Dataset is used only for non-commercial purposes and that the employer is likewise bound by this Agreement; the Researcher confirms that full authorization from the employer was obtained before entering into this Agreement.
+7. We reserve the right to cancel or revoke the Researcher's authorization to use the Dataset at any time, and to require the Researcher to delete any downloaded copies of the Dataset;
+8. Any dispute arising from or in connection with this Agreement shall be submitted to the China International Economic and Trade Arbitration Commission for arbitration under its rules in effect at the time of application, with the laws of the People's Republic of China governing. The language of arbitration shall be Chinese.
data/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b9fe39cb75d3cbe8cb72871b963678fd56885d55fe6a80182838d3ef71955fea
+size 112621685
data/ai_challenger_sentiment_analysis_trainingset_20180816/sentiment_analysis_trainingset_annotations.docx
ADDED
Binary file (102 kB)
data/ai_challenger_sentiment_analysis_validationset_20180816/README.txt
ADDED
@@ -0,0 +1,3 @@
+sentiment_analysis_validationset.csv is the validation set data file, containing 15,000 review entries in total
+sentiment_analysis_validationset_annotations.docx is the data annotation specification file
+protocol.txt is the dataset download agreement
data/ai_challenger_sentiment_analysis_validationset_20180816/protocol.txt
ADDED
@@ -0,0 +1,12 @@
+Dataset Download Agreement
+
+You (hereinafter the "Researcher") are requesting that the organizer grant you the right to access, download, and use the dataset (hereinafter the "Dataset") (such right hereinafter the "License"). As a condition of obtaining the License, you agree to the following terms:
+
+1. The Researcher agrees to use the Dataset solely for non-commercial scientific research or classroom teaching, and shall not use the Dataset for any commercial purpose;
+2. We do not own the intellectual property rights to the images, audio, text, or other content in the Dataset, and make no warranties about such content, including but not limited to non-infringement of third-party intellectual property rights or fitness for any particular purpose;
+3. We assume no liability for loss or harm of any kind caused by use of the Dataset, and accept no responsibility for any legal consequences arising from use of the competition data;
+4. All legal liability connected with use of the Dataset rests with the Researcher; if use of the Dataset by the Researcher or the Researcher's employees, agents, or affiliates causes us reputational or economic damage, the Researcher shall bear liability for compensation;
+5. The Researcher may authorize assistants, colleagues, or other collaborators to access and use the Dataset, but shall ensure that such persons have carefully read and agreed to be bound by this Agreement;
+6. If the Researcher is employed by a for-profit commercial entity, the Researcher shall ensure that the Dataset is used only for non-commercial purposes and that the employer is likewise bound by this Agreement; the Researcher confirms that full authorization from the employer was obtained before entering into this Agreement.
+7. We reserve the right to cancel or revoke the Researcher's authorization to use the Dataset at any time, and to require the Researcher to delete any downloaded copies of the Dataset;
+8. Any dispute arising from or in connection with this Agreement shall be submitted to the China International Economic and Trade Arbitration Commission for arbitration under its rules in effect at the time of application, with the laws of the People's Republic of China governing. The language of arbitration shall be Chinese.
data/ai_challenger_sentiment_analysis_validationset_20180816/sentiment_analysis_validationset.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a72efc85000b735c4a1e795e3f854c880887bf0a25216c8e81e5d99ddb6979f8
+size 16016113
data/ai_challenger_sentiment_analysis_validationset_20180816/sentiment_analysis_validationset_annotations.docx
ADDED
Binary file (102 kB)
data/ai_challenger_sentiment_analysis_validationset_20180816/~$ntiment_analysis_validationset_annotations.docx
ADDED
Binary file (162 Bytes)
data/protocol.txt
ADDED
@@ -0,0 +1,12 @@
+Dataset Download Agreement
+
+You (hereinafter the "Researcher") are requesting that the organizer grant you the right to access, download, and use the dataset (hereinafter the "Dataset") (such right hereinafter the "License"). As a condition of obtaining the License, you agree to the following terms:
+
+1. The Researcher agrees to use the Dataset solely for non-commercial scientific research or classroom teaching, and shall not use the Dataset for any commercial purpose;
+2. We do not own the intellectual property rights to the images, audio, text, or other content in the Dataset, and make no warranties about such content, including but not limited to non-infringement of third-party intellectual property rights or fitness for any particular purpose;
+3. We assume no liability for loss or harm of any kind caused by use of the Dataset, and accept no responsibility for any legal consequences arising from use of the competition data;
+4. All legal liability connected with use of the Dataset rests with the Researcher; if use of the Dataset by the Researcher or the Researcher's employees, agents, or affiliates causes us reputational or economic damage, the Researcher shall bear liability for compensation;
+5. The Researcher may authorize assistants, colleagues, or other collaborators to access and use the Dataset, but shall ensure that such persons have carefully read and agreed to be bound by this Agreement;
+6. If the Researcher is employed by a for-profit commercial entity, the Researcher shall ensure that the Dataset is used only for non-commercial purposes and that the employer is likewise bound by this Agreement; the Researcher confirms that full authorization from the employer was obtained before entering into this Agreement.
+7. We reserve the right to cancel or revoke the Researcher's authorization to use the Dataset at any time, and to require the Researcher to delete any downloaded copies of the Dataset;
+8. Any dispute arising from or in connection with this Agreement shall be submitted to the China International Economic and Trade Arbitration Commission for arbitration under its rules in effect at the time of application, with the laws of the People's Republic of China governing. The language of arbitration shall be Chinese.
data/test/test_data.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e98ad61b049ebf83bc1bbcb6fdb1cfc5c044f65eed77bdcdf3983a1ed5669ba3
+size 609792
data/test/test_data_old.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b44d2ea97a6beca9822e6cd2f977ba90d7ee30cd821f1248d6939312fca9ce4
+size 158194
data/total.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a702dda1b1abec204cb0a389c39bcd3968fcbfc4e802ae263b72b8fe772f9c98
+size 3102984
data/train/train_data.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4d470333dade24f7f5bb484c64aa66372206dddf89c0a152a9d23489aabf248e
+size 1873975
data/train/train_data.txt
ADDED
The diff for this file is too large to render.
data/train/train_data2.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4fa7c2fc2712066a55f1367fc4421e3e7206400e315b68df7defd594e1a34030
+size 468835
data/train/train_data_old.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97cd8de21112574716c34b2dcaf7bfede6b32b2f3dfe19a06367c477c09aa159
+size 469082
data/valid/val_data.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b1834db140b02f3f530cd85272a2fbfc219b30bc9fb19e7a5893ddc81c79cfd0
+size 616683
data/valid/val_data_old.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c826978817a4677a8fbab1195e2ac7ff9741aa653da16f1dd7a7f3f56f844520
+size 154559
data/valid/专家评语.csv
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:55621fde0640b7d171df93ff215ec75f74bd3c3b0c7605f2e6ace9e9a7992a57
+size 535168
data_gen.py
ADDED
@@ -0,0 +1,249 @@
+import itertools
+
+import jieba
+import numpy as np
+from torch.utils.data import Dataset
+import config
+from tqdm import tqdm
+from utils import *
+import jieba_cut
+
+
+def to_categorical(y, num_classes):
+    """ 1-hot encodes a tensor """
+    return np.eye(num_classes, dtype='uint8')[y]
+
+
+# Meaning     Positive  Neutral  Negative  Not mentioned
+# Old labels  1         0        -1        -2
+# New labels  3         2        1         0
+def map_sentimental_type(value):
+    #return value + 2
+    return value + 1
+
+
+def parse_user_reviews(user_reviews):
+    samples = []
+    for i in range(len(user_reviews)):
+        content = user_reviews['content'][i]
+        label_tensor = np.empty((num_labels,), dtype=np.int32)
+        for idx, name in enumerate(label_names):
+            sentimental_type = user_reviews[name][i]
+            y = map_sentimental_type(sentimental_type)
+            # label_tensor[:, idx] = to_categorical(y, num_classes)
+            # CrossEntropyLoss does not expect a one-hot encoded vector as the target, but class indices.
+            label_tensor[idx] = y
+        samples.append({'content': content, 'label_tensor': label_tensor})
+        #print("samples",samples)
+    return samples
+
+PAD, CLS, SEP = '[PAD]', '[CLS]', '[SEP]'  # padding symbol and BERT's sequence-summary/separator symbols
+def parse_user_reviews_bert(user_reviews):
+    samples = []
+    pad_size = config.pad_size
+    sen_pad_size = config.sentence_pad_size
+    for i in range(len(user_reviews)):
+        content = user_reviews['content'][i]
+        sentences = jieba_cut.cut_sentence(content)
+        label_tensor = np.empty((num_labels,), dtype=np.int32)
+        sentences_ids = []
+        sentences_mask = []
+        for sentence in sentences:
+            token = config.tokenizer.tokenize(sentence)
+            ids = config.tokenizer.convert_tokens_to_ids(token)
+            if sen_pad_size:
+                if len(token) < sen_pad_size:
+                    sentence_mask = [1] * len(ids) + [0] * (sen_pad_size - len(token))
+                    ids += ([0] * (sen_pad_size - len(token)))
+                else:
+                    sentence_mask = [1] * sen_pad_size
+                    ids = ids[:sen_pad_size]
+                    seq_len = sen_pad_size
+            #print("length of each sentence", len(ids))
+            sentences_ids.append(ids)
+            sentences_mask.append(sentence_mask)
+        #print(sentences_ids, len(sentences_ids))
+
+        for idx, name in enumerate(label_names):
+            sentimental_type = user_reviews[name][i]
+            y = map_sentimental_type(sentimental_type)
+            # label_tensor[:, idx] = to_categorical(y, num_classes)
+            # CrossEntropyLoss does not expect a one-hot encoded vector as the target, but class indices.
+            label_tensor[idx] = y
+        token = config.tokenizer.tokenize(content)
+        #token = [CLS] + ["标题", "选题角度与价值", "文献综述归纳总结情况", "论文工作量", "是否掌握基础(专业)知识", "是否具备科研能力", "格式规范", "行文表达", "逻辑性", "研究方法", "研究结论", "创新性及论文价值", "理论深度", "学术端正性"]+token
+        aspect_token = config.tokenizer.tokenize("标题, 选题角度与价值, 文献综述归纳总结情况, 论文工作量, 是否掌握基础(专业)知识, 是否具备科研能力, 格式规范, 行文表达, 逻辑性, 研究方法, 研究结论, 创新性及论文价值, 理论深度, 学术端正性")
+        #print(aspect_token)
+        token = [CLS] + token
+        #token = [CLS] + token + [SEP] + aspect_token + [SEP]
+        #print("token",token)
+        seq_len = len(token)
+        #print(seq_len)
+        mask = []
+        # This step only maps tokens to their vocabulary IDs; no self-attention computation happens yet
+        token_ids = config.tokenizer.convert_tokens_to_ids(token)  #config.tokenizer=BertTokenizer.from_pretrained(bert_path).convert_tokens_to_ids
+
+        #print("token_ids",token_ids)
+        if pad_size:
+            if len(token) < pad_size:
+                mask = [1] * len(token_ids) + [0] * (pad_size - len(token))
+                token_ids += ([0] * (pad_size - len(token)))
+            else:
+                mask = [1] * pad_size
+                token_ids = token_ids[:pad_size]
+                seq_len = pad_size
+        #contents.append((token_ids, int(label), seq_len, mask))
+        samples.append({'content': token_ids, 'label_tensor': label_tensor, 'mask': mask, 'sentences': sentences_ids, 'sentences_mask': sentences_mask})
+        #print("samples", samples)
+    return samples
+
+
+def zeroPadding(l, fillvalue=PAD_token):
+    return list(itertools.zip_longest(*l, fillvalue=fillvalue))
+
+
+# Returns padded input sequence tensor and lengths
+def inputVar(indexes_batch):
+    lengths = torch.tensor([len(indexes) for indexes in indexes_batch])
+    padList = zeroPadding(indexes_batch)
+    padVar = torch.LongTensor(padList)
+    return padVar, lengths
+
+
+# Returns all items for a given batch of pairs
+def batch2TrainData(pair_batch):
+    pair_batch.sort(key=lambda x: len(x[0]), reverse=True)
+    input_batch, output_batch = [], []
+    for pair in pair_batch:
+        input_batch.append(pair[0])
+        output_batch.append(pair[1])
+    inp, lengths = inputVar(input_batch)
+    output = torch.LongTensor(output_batch)
+    return inp, lengths, output
+
+def batch2TrainData_bert(pair_batch):
+    pair_batch.sort(key=lambda x: len(x[0]), reverse=True)
+    input_batch, output_batch, mask, sentences, sentences_mask = [], [], [], [], []
+    for pair in pair_batch:
+        input_batch.append(pair[0])
+        output_batch.append(pair[1])
+        mask.append(pair[2])
+        sentences.append(pair[3])
+        sentences_mask.append(pair[4])
+    inp, lengths = inputVar(input_batch)
+    output = torch.LongTensor(output_batch)
+    return inp, lengths, output, mask, sentences, sentences_mask
+
+class SaDataset(Dataset):
+    def __init__(self, split, voc, flag):
+        self.split = split
+        self.voc = voc
+        self.flag = flag
+        #assert self.split in {'train', 'valid'}  # assertion checking that self.split is one of {'train', 'valid'}
+
+        if split == 'train':
+            filename = os.path.join(train_folder, train_filename)
+        elif split == 'valid':
+            filename = os.path.join(valid_folder, valid_filename)
+        else:
+            filename = os.path.join(test_a_folder, test_a_filename)
+
+        if flag == 'bert':
+            user_reviews = pd.read_csv(filename)
+            self.samples = parse_user_reviews_bert(user_reviews)  # everything works up to this point; samples contain the mask
+            self.num_chunks = len(self.samples) // chunk_size
+            #print(self.samples,"sampless!!!!")
+        else:
+            # user_reviews = pd.read_csv(filename,encoding='gbk')
+            user_reviews = pd.read_csv(filename)
+            self.samples = parse_user_reviews(user_reviews)
+            self.num_chunks = len(self.samples) // chunk_size
+
+
+    def __getitem__(self, i):
+
+        pair_batch = []
+        if self.flag == 'bert':
+            for i_chunk in range(chunk_size):
+                idx = i * chunk_size + i_chunk
+                content = self.samples[idx]['content']
+                input_indexes = content
+                # print(list(seg_list))
+                label_tensor = self.samples[idx]['label_tensor']
+                mask = self.samples[idx]['mask']
+                sentences = self.samples[idx]['sentences']  # list of the split sentences
+                sentences_mask = self.samples[idx]['sentences_mask']  # masks for the split sentences
+                pair_batch.append((input_indexes, label_tensor, mask, sentences, sentences_mask))
+
+            return batch2TrainData_bert(pair_batch)
+
+        else:
+            for i_chunk in range(chunk_size):
+                idx = i * chunk_size + i_chunk
+                content = self.samples[idx]['content']
+                content = content.strip()
+                seg_list = jieba.cut(content)
+                input_indexes = encode_text(self.voc.word2index, list(seg_list))
+                # print(list(seg_list))
+                label_tensor = self.samples[idx]['label_tensor']
+                pair_batch.append((input_indexes, label_tensor))
+
+            return batch2TrainData(pair_batch)
+
+
+    # pair_batch = []
+    #
+    # for i_chunk in range(chunk_size):
+    #     idx = i * chunk_size + i_chunk
+    #     content = self.samples[idx]['content']
+    #     # print(list(seg_list))
+    #     label_tensor = self.samples[idx]['label_tensor']
+    #     mask = self.samples[idx]['mask']
+    #     pair_batch.append((content, label_tensor, mask))
+    #
+    # return pair_batch
+
+
+    def __len__(self):
+        return self.num_chunks
+
+
+
+
+def build_dataset(config):
+
+    def load_dataset(path, pad_size=32):
+        contents = []
+
+        with open(path, 'r', encoding='UTF-8') as f:
+            for line in tqdm(f):
+                #print(line)
+                lin = line.strip()
+                #print(lin)
+                if not lin:
+                    continue
+                content, label = lin.split('\t')
+                #print(label)
+                #content, label = lin.split(',')
+                token = config.tokenizer.tokenize(content)
+                #print(token)
+                token = [CLS] + token
+                seq_len = len(token)
+                mask = []
+                token_ids = config.tokenizer.convert_tokens_to_ids(token)
+
+                if pad_size:
+                    if len(token) < pad_size:
+                        mask = [1] * len(token_ids) + [0] * (pad_size - len(token))
+                        token_ids += ([0] * (pad_size - len(token)))
+                    else:
+                        mask = [1] * pad_size
+                        token_ids = token_ids[:pad_size]
+                        seq_len = pad_size
+                contents.append((token_ids, int(label), seq_len, mask))
+        return contents
+    train = load_dataset(config.train_path, config.pad_size)
+    dev = load_dataset(config.dev_path, config.pad_size)
+    test = load_dataset(config.test_path, config.pad_size)
+    return train, dev, test
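Note: a minimal usage sketch for SaDataset (not part of this commit), assuming the config constants and utils.Lang used elsewhere in this repo; each __getitem__ call returns one pre-batched chunk of chunk_size samples:

from utils import Lang
from data_gen import SaDataset

voc = Lang('data/WORDMAP.json')
train_set = SaDataset('train', voc, flag='bert')
# batch2TrainData_bert packs each chunk into tensors plus the BERT masks
inp, lengths, labels, mask, sentences, sentences_mask = train_set[0]
print(inp.size(), labels.size())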
data_make.py
ADDED
@@ -0,0 +1,22 @@
+import pandas as pd
+import numpy as np
+from sklearn.model_selection import train_test_split
+import chardet
+
+# Detect the file's character encoding
+with open('data/total.csv', 'rb') as f:
+    result = chardet.detect(f.read())
+
+# Read the file using the detected encoding
+#data = pd.read_csv('专家评语.csv', encoding=result['encoding'])
+
+data = pd.read_csv('data/total.csv', encoding=result['encoding'])
+
+# Split the data into training, validation, and test sets
+train_data, test_data = train_test_split(data, test_size=0.2, random_state=42)
+train_data, val_data = train_test_split(train_data, test_size=0.25, random_state=42)
+
+# Save the splits to new CSV files
+train_data.to_csv('train_data.csv', index=False)
+val_data.to_csv('val_data.csv', index=False)
+test_data.to_csv('test_data.csv', index=False)
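Note: the two-stage split yields a 60/20/20 partition: test_size=0.2 holds out 20% for the test set, and test_size=0.25 of the remaining 80% carves out another 20% (0.25 × 0.8 = 0.2) for validation. A sanity-check sketch (not part of this commit):

n = len(data)
assert abs(len(test_data) - 0.2 * n) <= 1   # ~20% test
assert abs(len(val_data) - 0.2 * n) <= 1    # ~20% validation
assert abs(len(train_data) - 0.6 * n) <= 1  # ~60% train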
ddparser_test.py
ADDED
@@ -0,0 +1,8 @@
+from ddparser import DDParser
+# -*- coding: utf-8 -*-
+
+ddp = DDParser(use_pos=True)
+str = '本文基于单目视觉方法,研究了一种动态环境下的视觉SLAM系统,利用光流检测算法剔除动态特征点,从而利用静态特征点估计相机位姿,实现单目视觉的SLAM定位功能;课题具有较为重要的理论意义与实用价值。论文结构较为严谨,工作量满足学位论文要求,研究成果反映了作者专业理论知识基础较为扎实,具有一定的从事科学研究能力。'
+result = ddp.parse(str)
+print(result)
+
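Note: a sketch of walking the parse result (not part of this commit), assuming DDParser returns a list of dicts with 'word', 'head', and 'deprel' keys (plus 'postag' when use_pos=True), with 1-based head indices and 0 marking the root:

tree = result[0]
for i, (word, head, rel) in enumerate(zip(tree['word'], tree['head'], tree['deprel']), start=1):
    # head == 0 means this token attaches to the virtual root
    parent = tree['word'][head - 1] if head > 0 else 'ROOT'
    print(f"{i}: {word} --{rel}--> {parent}")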
demo.py
ADDED
@@ -0,0 +1,80 @@
+# import the necessary packages
+import json
+import os
+import random
+
+import jieba
+import pandas as pd
+import torch
+
+from config import device, save_folder, valid_folder, valid_filename
+from data_gen import parse_user_reviews, batch2TrainData
+from utils import Lang, encode_text
+
+if __name__ == '__main__':
+    voc = Lang('data/WORDMAP.json')
+    print("voc.n_words: " + str(voc.n_words))
+
+    checkpoint = '{}/BEST_checkpoint.tar'.format(save_folder)  # model checkpoint
+    print('checkpoint: ' + str(checkpoint))
+
+    # Load model
+    checkpoint = torch.load(checkpoint)
+    encoder = checkpoint['encoder']
+    print(encoder)
+
+    # Use appropriate device
+    encoder = encoder.to(device)
+
+    # Set dropout layers to eval mode
+    encoder.eval()
+
+    filename = os.path.join(valid_folder, valid_filename)
+    user_reviews = pd.read_csv(filename)
+    samples = parse_user_reviews(user_reviews)
+
+    #samples = random.sample(samples, 10)  # randomly pick 10 samples for validation
+    pair_batch = []
+    result = []
+    label = []
+    for i, sample in enumerate(samples):
+        #print(i,sample)
+        content = sample['content']
+
+        result.append({'content': content})
+        #result.append({'ori_label': ori_label})  # write the ground-truth label into the results for easy comparison
+
+        label_tensor = sample['label_tensor']
+        #label.append({'label':label_tensor})
+        content = content.strip()
+        seg_list = jieba.cut(content)
+        input_indexes = encode_text(voc.word2index, list(seg_list))
+        label_tensor = sample['label_tensor']
+        pair_batch.append((input_indexes, label_tensor))
+
+    #print(pair_batch[0])
+    test_data = batch2TrainData(pair_batch)
+    #print(test_data)
+    input_variable, lengths, _ = test_data
+    input_variable = input_variable.to(device)
+    lengths = lengths.to(device)
+    outputs = encoder(input_variable, lengths)
+
+    _, outputs = torch.max(outputs, 1)  # the underscore discards the max values; max returns the argmax index, i.e. the model's prediction
+    print(outputs.shape)
+    print('outputs.size(): ' + str(outputs.size()))
+    outputs = outputs.cpu().numpy()
+
+
+
+    for i in range(len(samples)):
+        result[i]['labels'] = (outputs[i] - 1).tolist()
+        result[i]['ori_label'] = (samples[i]['label_tensor'] - 1).tolist()  # convert to list for JSON serialization; -1 undoes the +1 applied during label mapping
+
+        #print(result[i])
+        #print("origin",label[i]['label']-1)
+        # print("result",result[i]['labels'])
+
+    with open('result.json', 'w') as file:
+        json.dump(result, file, indent=4, ensure_ascii=False)
+
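Note: a hypothetical follow-up (not part of this commit) that reads the result.json written above and reports overall label accuracy from its 'labels' and 'ori_label' fields:

import json

with open('result.json', encoding='utf-8') as f:
    result = json.load(f)

total = correct = 0
for r in result:
    # each entry carries per-aspect predicted and original labels
    for pred, gold in zip(r['labels'], r['ori_label']):
        total += 1
        correct += int(pred == gold)
print('label accuracy: {:.3f}'.format(correct / total))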
extract.py
ADDED
@@ -0,0 +1,31 @@
+import os
+import zipfile
+
+train_folder = 'data/ai_challenger_fsauor2018_trainingset_20180816'
+valid_folder = 'data/ai_challenger_fsauor2018_validationset_20180816'
+test_a_folder = 'data/ai_challenger_fsauor2018_testa_20180816'
+
+
+def ensure_folder(folder):
+    if not os.path.exists(folder):
+        os.makedirs(folder)
+
+
+def extract(folder):
+    filename = '{}.zip'.format(folder)
+    print('Extracting {}...'.format(filename))
+    with zipfile.ZipFile(filename, 'r') as zip_ref:
+        zip_ref.extractall('data')
+
+
+if __name__ == '__main__':
+    ensure_folder('data')
+
+    if not os.path.isdir(train_folder):
+        extract(train_folder)
+
+    if not os.path.isdir(valid_folder):
+        extract(valid_folder)
+
+    if not os.path.isdir(test_a_folder):
+        extract(test_a_folder)
gbk2utf.py
ADDED
@@ -0,0 +1,13 @@
+import codecs
+
+with codecs.open('./data/valid/val_data.csv', 'r', 'gbk') as f:
+    content = f.read()
+
+
+
+content_utf8 = content.encode('utf-8')
+
+#print(content)
+
+with codecs.open('./data/valid/val_data.csv', 'w', 'utf-8') as f:
+    f.write(content_utf8.decode('utf-8'))
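Note: the encode('utf-8')/decode('utf-8') round trip above is a no-op on the already-decoded string; an equivalent, leaner sketch (not part of this commit) relying only on open()'s encoding arguments:

with open('./data/valid/val_data.csv', 'r', encoding='gbk') as f:
    content = f.read()
with open('./data/valid/val_data.csv', 'w', encoding='utf-8') as f:
    f.write(content)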
jieba_cut.py
ADDED
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+import jieba
+import pandas as pd
+import config
+
+def cut_sentence(text):
+    sentence_list = []
+    seg_list = jieba.cut(text, cut_all=False)
+    seg_list = [word for word in seg_list if word.strip()]  # drop empty tokens
+    start = 0
+    for i, word in enumerate(seg_list):
+        if word in ['。', '!', '?', ';']:
+            sentence = ''.join(seg_list[start:i+1])
+            sentence_list.append(sentence)
+            start = i + 1
+    if start < len(seg_list):
+        sentence = ''.join(seg_list[start:])
+        sentence_list.append(sentence)
+    return sentence_list
+
+filename = "data/train/train_data.csv"
+filename2 = "data/total.csv"
+user_reviews = pd.read_csv(filename)
+#print(user_reviews)
+max_len = 0
+# '''find the maximum sentence length'''
+# for i in range(len(user_reviews)):
+#     content = user_reviews['content'][i]
+#     sentences = cut_sentence(content)
+#     for sentence in sentences:
+#         length = len(sentence)
+#         #print(sentence)
+#         if length > max_len:
+#             max_len = length
+# print("maximum sentence length", max_len)
+'''collect the lengths of the longest sentences'''
+lengths = []  # list for collecting sentence lengths
+conten_lengths = []
+for i in range(len(user_reviews)):
+    content = user_reviews['content'][i]
+    con_len = len(content)
+    conten_lengths.append(con_len)
+    sentences = cut_sentence(content)
+    for sentence in sentences:
+        length = len(sentence)
+        lengths.append(length)
+        #print(sentence)
+        # if length == 518:
+        #     print(sentence)
+# sort the sentence lengths
+lengths.sort(reverse=True)
+# take the lengths of the longest sentences (the slice takes the top 300, despite the variable name)
+top_5_lengths = lengths[:300]
+# sort the whole-review lengths
+conten_lengths.sort(reverse=True)
+# take the lengths of the longest reviews
+count = 0  # counter for reviews longer than 512 characters
+
+for length in conten_lengths:
+    if length > 512:
+        count += 1
+
+print("number of reviews longer than 512:", count)
+top_5_content_lengths = conten_lengths[:300]
+print("lengths of the longest sentences:", top_5_lengths)
+print("lengths of the longest reviews:", top_5_content_lengths)
+
+
+
+# text="再制造是一项融入节约资源、环保等理念的绿色制造模式!随着工业技术不断更新换代,大量机床面临淘汰,机床再制造已经成为我国重点扶持的一项产业。本课题以某废旧机床再制造企业为背景,深入研究了废旧机床再制造的逆向物流过程。论文选题针对性明确,实用性明显。主要研究成果:1、提出了基于GM(1,1)和傅立叶级数的废旧机床回收量预测模型,用傅立叶级数对GM(1,1)模型预测值与原数值的残差序列进行拟合,以提高废旧机床回收量的预测精度。2、构建了基于碳税约束条件下废旧机床再制造逆向物流网络模型,从碳排放总量、网络总成本两个方面对逆向物流网络进行规划设计,并进行了求解。3、运用序关系法与逼近理想解排序法的混合评价方法,建立了第三方物流供应商评价指标和体系。论文对国内外该研究领域的研究现状调研分析较全面,参考文献尚充实,研究工作结合实际,有作者的独立见解。但是论文逻辑性较差,写作不规范,数学模型的推导过程欠严谨,符号变量表示较混乱,写作质量较差,表述不清和错别字很多,标点符号问题较多,实验研究工作量小,英文写作质量较差。论文工作表明该生基本能够掌握本学科领域的基础理论和专业知识,有一定独从事科学研究工作的能力。论文尚能够达到硕士学位论文的水平和要求,同意答辩。"
+# sentences = cut_sentence(text)
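Note: an illustrative call to cut_sentence (not part of this commit); the expected output is an assumption that follows from the delimiter set ['。', '!', '?', ';'] used above, since joining the jieba tokens reconstructs the original characters:

print(cut_sentence('论文选题合理。实验充分!结论可信?'))
# expected: ['论文选题合理。', '实验充分!', '结论可信?']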
log.txt
ADDED
@@ -0,0 +1,287 @@
+voc.n_words: 2365
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+[2024-04-23 09:48:24] Epoch: [0][0/230] Batch Time 7.092 (7.092) Loss 1.0954 (0.1095) Accuracy 35.714 (3.571)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+[2024-04-23 09:55:35] Epoch: [0][0/230] Batch Time 6.703 (6.703) Loss 1.0695 (0.1070) Accuracy 42.857 (4.286)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+[2024-04-23 10:24:34] Epoch: [0][0/230] Batch Time 6.640 (6.640) Loss 1.1402 (0.1140) Accuracy 33.036 (3.304)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+[2024-04-23 10:25:11] Epoch: [0][0/230] Batch Time 7.017 (7.017) Loss 1.1596 (0.1160) Accuracy 31.250 (3.125)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+[2024-04-23 10:26:06] Epoch: [0][0/230] Batch Time 6.974 (6.974) Loss 1.1505 (0.1151) Accuracy 32.143 (3.214)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+[2024-04-23 10:32:54] Epoch: [0][0/230] Batch Time 6.740 (6.740) Loss 1.1093 (0.1109) Accuracy 42.857 (4.286)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+[2024-04-23 10:49:56] Epoch: [0][0/230] Batch Time 6.495 (6.495) Loss 1.1974 (0.1197) Accuracy 35.714 (3.571)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+[2024-04-23 10:53:17] Epoch: [0][0/230] Batch Time 6.078 (6.078) Loss 1.2826 (0.1283) Accuracy 25.000 (2.500)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru_parms 0.02 ========================================
+[2024-04-23 13:56:12] Epoch: [0][0/230] Batch Time 6.209 (6.209) Loss 1.1109 (0.1111) Accuracy 33.929 (3.393)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_gru_totalSentence 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_gru_totalSentence 0.02 ========================================
+[2024-04-23 16:00:01] Epoch: [0][0/229] Batch Time 26.299 (26.299) Loss 1.1046 (0.1105) Accuracy 34.821 (3.482)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_gru_totalSentence 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+[2024-04-23 16:03:18] Epoch: [0][0/613] Batch Time 8.473 (8.473) Loss 1.1432 (0.1143) Accuracy 28.571 (2.857)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+[2024-04-23 16:45:49] Epoch: [0][0/613] Batch Time 6.602 (6.602) Loss 1.0679 (0.1068) Accuracy 30.952 (3.095)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+[2024-04-23 17:01:25] Epoch: [0][0/613] Batch Time 5.616 (5.616) Loss 1.1528 (0.1153) Accuracy 28.571 (2.857)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+[2024-04-24 13:59:24] Epoch: [0][0/613] Batch Time 6.501 (6.501) Loss 1.2672 (0.1267) Accuracy 26.190 (2.619)
+voc.n_words: 2365
+voc.n_words: 2365
+voc.n_words: 2365
+voc.n_words: 2365
+voc.n_words: 2365
+voc.n_words: 2365
+voc.n_words: 2365
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+[2024-04-24 14:45:52] Epoch: [0][0/613] Batch Time 12.326 (12.326) Loss 1.3152 (0.1315) Accuracy 23.810 (2.381)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+[2024-04-24 14:48:30] Epoch: [0][0/613] Batch Time 9.587 (9.587) Loss 1.0519 (0.1052) Accuracy 42.857 (4.286)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert 0.02 ========================================
+[2024-04-24 14:50:03] Epoch: [0][0/613] Batch Time 9.403 (9.403) Loss 1.2586 (0.1259) Accuracy 26.190 (2.619)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_CNN_BiGru 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence 0.02 ========================================
+[2024-04-24 15:41:59] Epoch: [0][0/613] Batch Time 11.057 (11.057) Loss 1.2508 (0.1251) Accuracy 19.048 (1.905)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence 0.02 ========================================
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 1
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 2
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 3
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 4
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 5
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 6
+
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence 0.02 ========================================
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 1
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 2
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 3
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 4
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 5
+
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence 0.02 ========================================
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 1
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 2
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 3
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 4
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 5
+
+
+* ACCURACY - 0.000, LOSS - 0.000
+
+
+Epochs since last improvement: 6
+
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence 0.02 ========================================
+[2024-04-24 16:05:07] Epoch: [0][0/613] Batch Time 6.894 (6.894) Loss 1.2131 (0.1213) Accuracy 33.333 (3.333)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence 0.02 ========================================
+[2024-04-24 16:05:40] Epoch: [0][0/613] Batch Time 5.485 (5.485) Loss 1.1047 (0.1105) Accuracy 42.857 (4.286)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence3 0.02 ========================================
+voc.n_words: 2365
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence3 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence3 0.02 ========================================
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence3 0.02 ========================================
+[2024-05-17 09:18:27] Epoch: [0][0/613] Batch Time 7.952 (7.952) Loss 1.1274 (0.1127) Accuracy 30.952 (3.095)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence3 0.02 ========================================
+[2024-05-17 09:33:19] Epoch: [0][0/613] Batch Time 7.035 (7.035) Loss 1.1134 (0.1113) Accuracy 33.333 (3.333)
+voc.n_words: 2365
+Building optimizers ...
+================================= Bert_sentence3 0.02 ========================================
+[2024-05-17 09:35:55] Epoch: [0][0/613] Batch Time 7.327 (7.327) Loss 1.0643 (0.1064) Accuracy 47.619 (4.762)
model2.py
ADDED
|
@@ -0,0 +1,298 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn as nn
|
| 3 |
+
import torch.nn.functional as F
|
| 4 |
+
from torch.autograd import Variable
|
| 5 |
+
|
| 6 |
+
import config
|
| 7 |
+
from config import num_labels, num_classes, batch_first, pad_size, chunk_size, hidden_size,filter_sizes
|
| 8 |
+
from pytorch_pretrained import BertModel, BertTokenizer
|
| 9 |
+
|
| 10 |
+
import torch
|
| 11 |
+
import torch.nn as nn
|
| 12 |
+
|
| 13 |
+
import torch
|
| 14 |
+
import torch.nn as nn
|
| 15 |
+
|
| 16 |
+
import torch
|
| 17 |
+
import torch.nn as nn
|
| 18 |
+
|
| 19 |
+
class Bert_add_aspect(nn.Module):
|
| 20 |
+
def __init__(self, input_size, bert_path,hidden_size, n_layers=1, dropout=0):
|
| 21 |
+
super(Bert_add_aspect, self).__init__()
|
| 22 |
+
self.n_layers = n_layers
|
| 23 |
+
self.hidden_size = hidden_size
|
| 24 |
+
self.bert = BertModel.from_pretrained(bert_path)
|
| 25 |
+
for param in self.bert.parameters():
|
| 26 |
+
param.requires_grad = True
|
| 27 |
+
self.fc = nn.Linear(hidden_size, num_labels * num_classes)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def forward(self, input_variable,mask, hidden=None):
|
| 31 |
+
# samples=samples.T
|
| 32 |
+
# print(samples)
|
| 33 |
+
context = input_variable.T # 输入的句子
|
| 34 |
+
#print(context)
|
| 35 |
+
mask = torch.tensor(mask).to("cuda") # 对padding部分进行mask,和句子一个size,padding部分用0表示,如:[1, 1, 1, 1, 0, 0]
|
| 36 |
+
#print('mask',mask)
|
| 37 |
+
_, pooled = self.bert(context, attention_mask=mask,output_all_encoded_layers=False ) #output_all_encoded_layers=False
|
| 38 |
+
# print(pooled.size())
|
| 39 |
+
out = self.fc(pooled)
|
| 40 |
+
out = out.view((-1, num_classes,num_labels))
|
| 41 |
+
# outputs = [batch size, num_classes, num_labels]
|
| 42 |
+
out = F.log_softmax(out, dim=1)
|
| 43 |
+
return out
|
| 44 |
+
class NonLocalBlock(nn.Module):
|
| 45 |
+
def __init__(self, in_channels, reduction=2):
|
| 46 |
+
super(NonLocalBlock, self).__init__()
|
| 47 |
+
|
| 48 |
+
self.in_channels = in_channels
|
| 49 |
+
self.reduction = reduction
|
| 50 |
+
|
| 51 |
+
self.theta = nn.Conv1d(in_channels, in_channels // reduction, kernel_size=1)
|
| 52 |
+
self.phi = nn.Conv1d(in_channels, in_channels // reduction, kernel_size=1)
|
| 53 |
+
self.g = nn.Conv1d(in_channels, in_channels // reduction, kernel_size=1)
|
| 54 |
+
self.out_conv = nn.Conv1d(in_channels // reduction, in_channels, kernel_size=1)
|
| 55 |
+
self.softmax = nn.Softmax(dim=-1)
|
| 56 |
+
|
| 57 |
+
def forward(self, x):
|
| 58 |
+
# Reshape input tensor for convolution
|
| 59 |
+
batch_size, seq_length, hidden_dim = x.size()
|
| 60 |
+
x = x.view(batch_size, hidden_dim, seq_length)
|
| 61 |
+
|
| 62 |
+
# Compute query, key, and value
|
| 63 |
+
theta = self.theta(x)
|
| 64 |
+
phi = self.phi(x)
|
| 65 |
+
g = self.g(x)
|
| 66 |
+
|
| 67 |
+
# Compute similarity scores
|
| 68 |
+
theta = theta.permute(0, 2, 1)
|
| 69 |
+
similarity_scores = torch.matmul(theta, phi)
|
| 70 |
+
similarity_scores = similarity_scores / (self.in_channels // self.reduction) ** 0.5
|
| 71 |
+
attention_weights = self.softmax(similarity_scores)
|
| 72 |
+
|
| 73 |
+
# Compute the weighted sum of values
|
| 74 |
+
g = g.permute(0, 2, 1)
|
| 75 |
+
weighted_sum = torch.matmul(attention_weights, g)
|
| 76 |
+
weighted_sum = weighted_sum.permute(0, 2, 1)
|
| 77 |
+
|
| 78 |
+
# Apply 1x1 convolution and add skip connection
|
| 79 |
+
out = self.out_conv(weighted_sum)
|
| 80 |
+
out = out + x
|
| 81 |
+
|
| 82 |
+
# Reshape output tensor to original shape
|
| 83 |
+
out = out.view(batch_size, seq_length, hidden_dim)
|
| 84 |
+
|
| 85 |
+
return out
|
| 86 |
+
|
| 87 |
+
'''加入了NonLocalBlock'''
|
| 88 |
+
class Bert_CNN_CLS_BiGru2(nn.Module):
|
| 89 |
+
def __init__(self, input_size, bert_path,hidden_size, n_layers=1, dropout=0):
|
| 90 |
+
super(Bert_CNN_CLS_BiGru2, self).__init__()
|
| 91 |
+
self.n_layers = n_layers
|
| 92 |
+
self.hidden_size = hidden_size
|
| 93 |
+
self.bert = BertModel.from_pretrained(bert_path)
|
| 94 |
+
for param in self.bert.parameters():
|
| 95 |
+
param.requires_grad = True
|
| 96 |
+
self.convs = nn.ModuleList(
|
| 97 |
+
[nn.Conv2d(in_channels=1, out_channels=256, kernel_size=(k, hidden_size)) for k in filter_sizes])
|
| 98 |
+
self.dropout = nn.Dropout(0.5)
|
| 99 |
+
self.non = NonLocalBlock(hidden_size)
|
| 100 |
+
self.gru=nn.GRU(input_size=hidden_size,hidden_size=hidden_size,num_layers=1,batch_first=True,bidirectional=True)
|
| 101 |
+
self.fc = nn.Linear(768+1536, num_labels * num_classes)
|
| 102 |
+
|
| 103 |
+
def conv_and_pool(self, x, conv):
|
| 104 |
+
x=conv(x)
|
| 105 |
+
#print("conv",x.size())
|
| 106 |
+
x = F.relu(x).squeeze(3)
|
| 107 |
+
#print("x2",x.size())
|
| 108 |
+
x = F.max_pool1d(x, x.size(2)).squeeze(2)
|
| 109 |
+
#x = F.adaptive_max_pool1d
|
| 110 |
+
#print("max_pool1d",x.size())
|
| 111 |
+
return x
|
| 112 |
+
|
| 113 |
+
def forward(self, input_variable,mask, hidden=None):
|
| 114 |
+
# samples=samples.T
|
| 115 |
+
# print(samples)
|
| 116 |
+
context = input_variable.T # 输入的句子
|
| 117 |
+
#print(context)
|
| 118 |
+
mask = torch.tensor(mask).to("cuda") # 对padding部分进行mask,和句子一个size,padding部分用0表示,如:[1, 1, 1, 1, 0, 0]
|
| 119 |
+
#print('mask',mask)
|
| 120 |
+
bert_out, pooled = self.bert(context, attention_mask=mask,output_all_encoded_layers=False ) #output_all_encoded_layers=False
|
| 121 |
+
#print(_out.size(),"bert_out")
|
| 122 |
+
#print("bert_out",bert_out.size())
|
| 123 |
+
#print("pooled_out",pooled.size())
|
| 124 |
+
bert_pool_out=pooled.unsqueeze(0)
|
| 125 |
+
non_out=self.non(bert_out)
|
| 126 |
+
#print("non_out",non_out.size())
|
| 127 |
+
out = non_out.unsqueeze(1) #增加一个维度
|
| 128 |
+
cnn_out = torch.cat([self.conv_and_pool(out, conv) for conv in self.convs], 1)
|
| 129 |
+
|
| 130 |
+
cnn_out = self.dropout(cnn_out) #不知道在哪里dropout比较好,有待研究 [8,768]
|
| 131 |
+
cnn_out=cnn_out.unsqueeze(0) #给CNN的输出增加一个维度,方便拼接,增加后的形状为[1, 8, 768]
|
| 132 |
+
#print("cnn_out",cnn_out.size())
|
| 133 |
+
gru_out,gru_h=self.gru(bert_out)
|
| 134 |
+
#print("gru_h",gru_h.size())
|
| 135 |
+
split_tensors = torch.split(gru_h, split_size_or_sections=1, dim=0)
|
| 136 |
+
#print("split_tensors",split_tensors[0].size())
|
| 137 |
+
cat_output = torch.cat(split_tensors, dim=2) #拼接正向输出和反向输出
|
| 138 |
+
#print('cat_output',cat_output.size())
|
| 139 |
+
#cat_cnn_gru=torch.cat([cat_output,cnn_out],dim=2)
|
| 140 |
+
cat_cnn_gru=torch.cat([cat_output,cnn_out],dim=2)
|
| 141 |
+
#print("cat_cnn_gru",cat_cnn_gru.size())
|
| 142 |
+
#out = torch.cat([bert_out,out],dim=1) #在第二个维度做拼接 [8,501,768]
|
| 143 |
+
#print("cat",out.size())
|
| 144 |
+
#out,h_n = self.gru(out) #[8,501,768]
|
| 145 |
+
#h_n=h_n.unsqueeze(0)
|
| 146 |
+
#print(h_n.size(),"gru_out")
|
| 147 |
+
out = self.fc(cat_cnn_gru) #[768,12*3]
|
| 148 |
+
#print(out.size(),"fc_out")
|
| 149 |
+
out = out.view((-1, num_classes,num_labels))
|
| 150 |
+
#print(out)
|
| 151 |
+
|
| 152 |
+
# outputs = [batch size, num_classes, num_labels]
|
| 153 |
+
out = F.log_softmax(out, dim=1) #[0,0,1] [1,0,0] [1,0,01]
|
| 154 |
+
return out
|
| 155 |
+
|
| 156 |
+
class Bert_gru_totalSentence(nn.Module):
|
| 157 |
+
def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
|
| 158 |
+
super(Bert_gru_totalSentence, self).__init__()
|
| 159 |
+
self.n_layers = n_layers
|
| 160 |
+
self.hidden_size = hidden_size
|
| 161 |
+
self.bert = BertModel.from_pretrained(bert_path)
|
| 162 |
+
for param in self.bert.parameters():
|
| 163 |
+
param.requires_grad = True
|
| 164 |
+
self.gru = nn.GRU(input_size=hidden_size, hidden_size=100, num_layers=1, batch_first=True,
|
| 165 |
+
bidirectional=False)
|
| 166 |
+
self.gru2=nn.GRU(input_size=500*100, hidden_size=hidden_size, num_layers=1, batch_first=True,
|
| 167 |
+
bidirectional=False)
|
| 168 |
+
self.fc = nn.Linear(hidden_size, num_labels * num_classes)
|
| 169 |
+
|
| 170 |
+
def forward(self, input_variable, mask, hidden=None):
|
| 171 |
+
# samples=samples.T
|
| 172 |
+
# print(samples)
|
| 173 |
+
context = input_variable.T # 输入的句子
|
| 174 |
+
# print(context)
|
| 175 |
+
mask = torch.tensor(mask) # 对padding部分进行mask,和句子一个size,padding部分用0表示,如:[1, 1, 1, 1, 0, 0]
|
| 176 |
+
# print('mask',mask)
|
| 177 |
+
_out, pooled = self.bert(context, attention_mask=mask,
|
| 178 |
+
output_all_encoded_layers=False) # output_all_encoded_layers=False
|
| 179 |
+
#print(_out.size(), "bert_out")
|
| 180 |
+
out, h_n = self.gru(_out)
|
| 181 |
+
#print(h_n.size(), "hn_out", out.size(), "gru_out")
|
| 182 |
+
out = out.reshape(8, 50000)
|
| 183 |
+
out, h_n = self.gru2(out)
|
| 184 |
+
gru_out2=out.unsqueeze(0)
|
| 185 |
+
#print(h_n.size(), "hn_out2", out.size(), "gru_out2")
|
| 186 |
+
out = self.fc(gru_out2)
|
| 187 |
+
# print(out.size(),"fc_out")
|
| 188 |
+
out = out.view((-1, num_classes, num_labels))
|
| 189 |
+
# outputs = [batch size, num_classes, num_labels]
|
| 190 |
+
out = F.log_softmax(out, dim=1)
|
| 191 |
+
return out
|
| 192 |
+
|
| 193 |
+
class Bert_my_vocab(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_my_vocab, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; same size as the sentence, with 0 marking padding, e.g. [1, 1, 1, 1, 0, 0]
        _, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        # print(pooled.size())
        out = self.fc(pooled)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out

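# Every head in this file ends with view([-1, num_classes, num_labels]) and log_softmax over
# dim 1, so the matching objective is NLLLoss with one class index per label column. A sketch
# with assumed sizes (3 classes, 12 labels, as the shape comments in this file suggest):
def _check_multilabel_head_loss():
    batch, n_cls, n_lab = 8, 3, 12
    logits = torch.randn(batch, n_cls * n_lab)
    log_probs = F.log_softmax(logits.view(-1, n_cls, n_lab), dim=1)   # [batch, n_cls, n_lab]
    targets = torch.randint(0, n_cls, (batch, n_lab))                 # one class per label
    loss = F.nll_loss(log_probs, targets)   # nll_loss supports [N, C, d] scores with [N, d] targets
    return loss
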
'''Bert-CNN-BiGRU with the BERT parameters frozen'''
class Bert_CNN_BiGru_parms(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_CNN_BiGru_parms, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        #self.att = nn.MultiheadAttention()
        # Optionally warm-start BERT from a saved state dict:
        #saved_params = torch.load('models/checkpoint_8_80.956.tar', map_location=torch.device('cpu'))
        # new dict holding only the parameters to load into the model
        #new_state_dict = {}
        # keep only the key/value pairs whose names match the BERT parameters
        # for key in saved_params.keys():
        #     if key.startswith('bert.'):
        #         new_state_dict[key] = saved_params[key]
        # self.bert.load_state_dict(new_state_dict)
        for param in self.bert.parameters():
            param.requires_grad = False  # freeze BERT; only the layers below are trained
        self.convs = nn.ModuleList(
            [nn.Conv2d(in_channels=1, out_channels=256, kernel_size=(k, hidden_size)) for k in filter_sizes])
        for param in self.convs.parameters():
            param.requires_grad = True
        self.dropout = nn.Dropout(0.5)
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=True)
        self.fc = nn.Linear(2304, num_labels * num_classes)
        for param in self.gru.parameters():
            param.requires_grad = True
        for param in self.fc.parameters():
            param.requires_grad = True

    def conv_and_pool(self, x, conv):
        x = conv(x)
        #print("conv", x.size())
        x = F.relu(x).squeeze(3)
        x = F.max_pool1d(x, x.size(2)).squeeze(2)
        #print("max_pool1d", x.size())
        return x

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask)  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        bert_out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        out = bert_out.unsqueeze(1)  # add a channel dimension for the convolutions
        cnn_out = torch.cat([self.conv_and_pool(out, conv) for conv in self.convs], 1)
        cnn_out = self.dropout(cnn_out)  # unclear where dropout works best; needs further study [8, 768]
        cnn_out = cnn_out.unsqueeze(0)  # add a leading dim to the CNN output for concatenation: [1, 8, 768]
        gru_out, gru_h = self.gru(bert_out)
        #print("gru_h", gru_h.size(), "gru_out", gru_out.size())
        split_tensors = torch.split(gru_h, split_size_or_sections=1, dim=0)
        cat_output = torch.cat(split_tensors, dim=2)  # concatenate the forward and backward final hidden states
        cat_cnn_gru = torch.cat([cat_output, cnn_out], dim=2)
        out = self.fc(cat_cnn_gru)
        out = out.view((-1, num_classes, num_labels))

        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out
models.py
ADDED
@@ -0,0 +1,873 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable


from config import num_labels, num_classes, batch_first, pad_size, chunk_size, hidden_size, filter_sizes
from pytorch_pretrained import BertModel, BertTokenizer


class EncoderRNN(nn.Module):
    def __init__(self, input_size, hidden_size, n_layers=1, dropout=0):
        super(EncoderRNN, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.embedding = nn.Embedding(input_size, hidden_size)

        # Initialize GRU; the input_size and hidden_size params are both set to 'hidden_size'
        # because our input size is a word embedding with number of features == hidden_size
        self.gru = nn.GRU(hidden_size, hidden_size, n_layers,
                          dropout=(0 if n_layers == 1 else dropout), bidirectional=True)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_seq, input_lengths, hidden=None):
        # input_seq = [sent len, batch size]
        # Convert word indexes to embeddings
        embedded = self.embedding(input_seq)
        # embedded = [sent len, batch size, hidden size]
        # Pack padded batch of sequences for RNN module
        packed = torch.nn.utils.rnn.pack_padded_sequence(embedded, input_lengths)
        # Forward pass through GRU
        outputs, hidden = self.gru(packed, hidden)
        # Unpack padding
        outputs, _ = torch.nn.utils.rnn.pad_packed_sequence(outputs)
        # Sum bidirectional GRU outputs
        outputs = outputs[:, :, :self.hidden_size] + outputs[:, :, self.hidden_size:]
        # outputs = [sent len, batch size, hidden size]

        # Extract the outputs for the last timestep of each example
        idx = (input_lengths - 1).view(-1, 1).expand(
            len(input_lengths), outputs.size(2))
        time_dimension = 1 if batch_first else 0
        idx = idx.unsqueeze(time_dimension)
        # Shape: (batch_size, rnn_hidden_dim)
        outputs = outputs.gather(
            time_dimension, Variable(idx)).squeeze(time_dimension)

        # outputs = [batch size, hidden size]
        outputs = self.fc(outputs)
        # outputs = [batch size, num_labels * num_classes]
        outputs = outputs.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        outputs = F.log_softmax(outputs, dim=1)
        # outputs = [batch size, num_classes, num_labels]

        # Return output
        return outputs

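# A toy illustration of the pack/unpack round trip and the bidirectional sum used by
# EncoderRNN (sizes are illustrative; lengths must be sorted descending by default):
def _demo_pack_and_sum_directions():
    H = 16
    gru = nn.GRU(H, H, bidirectional=True)                         # seq-first, like the encoder above
    seq = torch.randn(5, 3, H)                                     # [sent len, batch, features]
    lengths = torch.tensor([5, 4, 2])
    packed = torch.nn.utils.rnn.pack_padded_sequence(seq, lengths)
    outputs, hidden = gru(packed)
    outputs, _ = torch.nn.utils.rnn.pad_packed_sequence(outputs)   # [sent len, batch, 2H]
    return outputs[:, :, :H] + outputs[:, :, H:]                   # summed directions: [sent len, batch, H]
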
class Bert(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; same size as the sentence, with 0 marking padding, e.g. [1, 1, 1, 1, 0, 0]
        _, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        # print(pooled.size())
        out = self.fc(pooled)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out

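# A hypothetical call sketch for the Bert classifier above; the vocabulary size and shapes
# are placeholders, and bert_path should point at a local BERT checkpoint such as the
# chinese_wwm_pytorch directory in this repo:
def _demo_bert_forward(bert_path):
    model = Bert(input_size=21128, bert_path=bert_path, hidden_size=768).to("cuda")
    ids = torch.randint(0, 21128, (500, 8)).to("cuda")   # [sent len, batch]; forward transposes it
    mask = torch.ones(8, 500, dtype=torch.long)          # 1 = real token, 0 = padding
    return model(ids, mask)                              # [batch, num_classes, num_labels]
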
class Bert_sentence(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_sentence, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.bigru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.fc = nn.Linear(hidden_size * 2, num_labels * num_classes)
        self.att = nn.MultiheadAttention(embed_dim=768, num_heads=8, batch_first=True)

    def forward(self, input_variable, mask, sentences, sentences_mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        sentence_feature = []
        for paragraph, sentence_mask in zip(sentences, sentences_mask):
            s_pooled_list = []  # collects each sentence's s_pooled
            for sentence, sen_mask in zip(paragraph, sentence_mask):
                sentence = torch.tensor(sentence).to("cuda")
                sen_mask = torch.tensor(sen_mask).to("cuda")
                sentence = torch.unsqueeze(sentence, dim=0)
                sen_mask = torch.unsqueeze(sen_mask, dim=0)
                s_, s_pooled = self.bert(sentence, sen_mask, output_all_encoded_layers=False)
                s_pooled_list.append(s_pooled)
            # after the loop, concatenate the collected tensors into one
            s_pooled_tensor = torch.cat(s_pooled_list, dim=0)
            att_out, attention_weights = self.att(s_pooled_tensor, s_pooled_tensor, s_pooled_tensor)
            #print("att_out", att_out.size(), "attention_weights", attention_weights.size())
            gru_out, h_n = self.bigru(att_out)
            sentence_feature.append(h_n)
            #print("gru_out", gru_out.size(), "h_n", h_n.size())
        sentence_feature_tensor = torch.cat(sentence_feature, dim=0)
        cat_out = torch.cat((sentence_feature_tensor, pooled), dim=1)
        out = self.fc(cat_out)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out

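# The paragraph loop above treats each sentence's CLS vector as one token of a higher-level
# sequence. A compact sketch of that idea on its own (unbatched MultiheadAttention input,
# which is what the loop relies on; 6 sentences is illustrative):
def _demo_sentence_level_attention():
    att = nn.MultiheadAttention(embed_dim=768, num_heads=8, batch_first=True)
    gru = nn.GRU(input_size=768, hidden_size=768, num_layers=1, batch_first=True)
    cls_vectors = torch.randn(6, 768)                             # one review split into 6 sentences
    att_out, att_w = att(cls_vectors, cls_vectors, cls_vectors)   # [6, 768], weights [6, 6]
    gru_out, h_n = gru(att_out)                                   # h_n: [1, 768], one feature per review
    return h_n
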
'''After obtaining each sentence's CLS vector, use max pooling instead of a GRU'''
class Bert_sentence3(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_sentence3, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.bigru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)
        self.att = nn.MultiheadAttention(embed_dim=768, num_heads=12, batch_first=True)

    def forward(self, input_variable, mask, sentences, sentences_mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask)  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        sentence_feature = []
        for paragraph, sentence_mask in zip(sentences, sentences_mask):
            s_pooled_list = []  # collects each sentence's s_pooled
            for sentence, sen_mask in zip(paragraph, sentence_mask):
                sentence = torch.tensor(sentence)
                sen_mask = torch.tensor(sen_mask)
                sentence = torch.unsqueeze(sentence, dim=0)
                sen_mask = torch.unsqueeze(sen_mask, dim=0)
                s_, s_pooled = self.bert(sentence, sen_mask, output_all_encoded_layers=False)
                s_pooled_list.append(s_pooled)
            # after the loop, concatenate the collected tensors into one
            s_pooled_tensor = torch.cat(s_pooled_list, dim=0)
            att_out, attention_weights = self.att(s_pooled_tensor, s_pooled_tensor, s_pooled_tensor)
            # max pooling over dim 0, keeping shape [1, 768]
            att_out_max, _ = torch.max(att_out, dim=0, keepdim=True)
            # gru_out, h_n = self.bigru(att_out)
            sentence_feature.append(att_out_max)
        sentence_feature_tensor = torch.cat(sentence_feature, dim=0)
        #cat_out = torch.cat((sentence_feature_tensor, pooled), dim=1)
        out = self.fc(sentence_feature_tensor)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out

class Bert_GRU(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_GRU, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.gru = nn.GRU(hidden_size, hidden_size, 1, batch_first=True, bidirectional=False)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        #print(pooled.size())
        _, out = self.gru(pooled)  # note: pooled is [batch, hidden], so the GRU reads the batch as one unbatched sequence
        #print(out.size())
        out = self.fc(out)
        #print(out.size())
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out

'''Feed BERT's unpooled output into a GRU: BERT encodes, the GRU extracts the features'''
class Bert_GRU2(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_GRU2, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        #print(_out.size(), "bert_out")
        out, h_n = self.gru(_out)
        #print(h_n.size(), "hn_out", out.size(), "gru_out")
        out = self.fc(h_n)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out


'''Feed BERT's unpooled output into a BiGRU, with a learnable weight parameter that sets how the
forward and backward final hidden states are combined in a weighted sum'''
class Bert_BiGRU(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_BiGRU, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        # a learnable parameter of size 2, one scalar per direction
        self.weights = nn.Parameter(torch.randn(2))
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.Bigru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=True)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)
        self.global_avg_pooling = nn.AdaptiveAvgPool1d(768)

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        Biout, Bih_n = self.Bigru(_out)
        # weighted sum of the forward and backward final hidden states
        weighted_sum = self.weights[0] * Bih_n[0] + self.weights[1] * Bih_n[1]
        Biout = self.fc(weighted_sum)
        Biout = Biout.unsqueeze(0)
        Biout = Biout.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(Biout, dim=1)
        return out

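# A standalone sketch of the learnable two-way weighting in Bert_BiGRU; random tensors stand
# in for the BiGRU final hidden states, and the einsum line is an equivalent vectorized form
# of the elementwise sum used above:
def _demo_learnable_direction_weights():
    weights = nn.Parameter(torch.randn(2))         # one scalar per direction, trained with the model
    Bih_n = torch.randn(2, 8, 768)                 # [directions, batch, hidden]
    weighted_sum = weights[0] * Bih_n[0] + weights[1] * Bih_n[1]   # [8, 768]
    same = torch.einsum('d,dbh->bh', weights, Bih_n)
    assert torch.allclose(weighted_sum, same, atol=1e-5)
    return weighted_sum
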
'''Encode with BERT, extract features with a CNN, concatenate the CNN output with the BERT output, then feed the result into a GRU'''
class Bert_CNN_Gru(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_CNN_Gru, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.convs = nn.ModuleList(
            [nn.Conv2d(in_channels=1, out_channels=256, kernel_size=(k, hidden_size)) for k in filter_sizes])
        self.dropout = nn.Dropout(0.5)
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def conv_and_pool(self, x, conv):
        x = conv(x)
        #print("conv", x.size())
        x = F.relu(x).squeeze(3)
        x = F.max_pool1d(x, x.size(2)).squeeze(2)
        #print("max_pool1d", x.size())
        return x

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        bert_out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        out = bert_out.unsqueeze(1)  # add a channel dimension
        out = torch.cat([self.conv_and_pool(out, conv) for conv in self.convs], 1)
        out = self.dropout(out)  # unclear where dropout works best; needs further study [8, 768]
        out = out.unsqueeze(1)  # [8, 1, 768]: add a time step so it can be concatenated with the BERT output
        out = torch.cat([bert_out, out], dim=1)  # concatenate along the sequence dimension: [8, 501, 768]
        out, h_n = self.gru(out)  # [8, 501, 768]
        out = self.fc(h_n)
        #print(out.size(), "fc_out")
        out = out.view((-1, num_classes, num_labels))

        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out

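# conv_and_pool is the classic TextCNN step; a shape walk-through under assumed sizes
# (batch 8, sequence 500, hidden 768, kernel height 3, 256 output channels):
def _demo_conv_and_pool_shapes():
    B, L, H, k = 8, 500, 768, 3
    conv = nn.Conv2d(in_channels=1, out_channels=256, kernel_size=(k, H))
    x = torch.randn(B, 1, L, H)                    # BERT output with a channel dim added
    x = conv(x)                                    # [B, 256, L-k+1, 1]
    x = F.relu(x).squeeze(3)                       # [B, 256, L-k+1]
    x = F.max_pool1d(x, x.size(2)).squeeze(2)      # [B, 256]: one max over time per filter
    # with three filter sizes, concatenation yields [B, 3*256] == [B, 768]
    return x
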
'''Bert-CNN-BiGRU'''
class Bert_CNN_BiGru(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_CNN_BiGru, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.convs = nn.ModuleList(
            [nn.Conv2d(in_channels=1, out_channels=256, kernel_size=(k, hidden_size)) for k in filter_sizes])
        self.dropout = nn.Dropout(0.5)
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=True)
        self.fc = nn.Linear(2304, num_labels * num_classes)

    def conv_and_pool(self, x, conv):
        x = conv(x)
        x = F.relu(x).squeeze(3)
        x = F.max_pool1d(x, x.size(2)).squeeze(2)
        return x

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        bert_out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        #print("bert_out", bert_out.size())
        out = bert_out.unsqueeze(1)  # add a channel dimension
        cnn_out = torch.cat([self.conv_and_pool(out, conv) for conv in self.convs], 1)
        #print("cnn_out", cnn_out.size())
        cnn_out = self.dropout(cnn_out)  # unclear where dropout works best; needs further study [8, 768]
        cnn_out = cnn_out.unsqueeze(0)  # add a leading dim for concatenation: [1, 8, 768]
        gru_out, gru_h = self.gru(bert_out)
        #print("gru_h", gru_h.size(), "gru_out", gru_out.size())
        split_tensors = torch.split(gru_h, split_size_or_sections=1, dim=0)
        cat_output = torch.cat(split_tensors, dim=2)  # concatenate the forward and backward final hidden states
        cat_cnn_gru = torch.cat([cat_output, cnn_out], dim=2)
        #print("cat_cnn_gru", cat_cnn_gru.size())
        out = self.fc(cat_cnn_gru)
        out = out.view((-1, num_classes, num_labels))

        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out

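# The fc widths in these hybrid heads are plain dimension bookkeeping, assuming filter_sizes
# in config has three entries of 256 channels each:
def _check_fc_widths():
    hidden, n_filters, n_kernels = 768, 256, 3
    assert 2 * hidden + n_filters * n_kernels == 2304           # Bert_CNN_BiGru's fc
    assert 2 * hidden + n_filters * n_kernels + hidden == 3072  # Bert_CNN_CLS_BiGru below adds the CLS vector
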
'''Bert-CNN-CLS-BiGRU'''
class Bert_CNN_CLS_BiGru(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_CNN_CLS_BiGru, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.convs = nn.ModuleList(
            [nn.Conv2d(in_channels=1, out_channels=256, kernel_size=(k, hidden_size)) for k in filter_sizes])
        self.dropout = nn.Dropout(0.5)
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=True)
        self.fc = nn.Linear(3072, num_labels * num_classes)

    def conv_and_pool(self, x, conv):
        x = conv(x)
        x = F.relu(x).squeeze(3)
        x = F.max_pool1d(x, x.size(2)).squeeze(2)
        return x

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        bert_out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        bert_pool_out = pooled.unsqueeze(0)
        out = bert_out.unsqueeze(1)  # add a channel dimension
        cnn_out = torch.cat([self.conv_and_pool(out, conv) for conv in self.convs], 1)
        cnn_out = self.dropout(cnn_out)  # unclear where dropout works best; needs further study [8, 768]
        cnn_out = cnn_out.unsqueeze(0)  # add a leading dim for concatenation: [1, 8, 768]
        gru_out, gru_h = self.gru(bert_out)
        split_tensors = torch.split(gru_h, split_size_or_sections=1, dim=0)
        cat_output = torch.cat(split_tensors, dim=2)  # concatenate the forward and backward final hidden states
        cat_cnn_gru = torch.cat([cat_output, cnn_out, bert_pool_out], dim=2)
        out = self.fc(cat_cnn_gru)
        out = out.view((-1, num_classes, num_labels))

        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out


'''Bert-CNN-CLS-BiGRU with self-attention'''
class Bert_CNN_CLS_BiGru1(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_CNN_CLS_BiGru1, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.convs = nn.ModuleList(
            [nn.Conv2d(in_channels=1, out_channels=256, kernel_size=(k, hidden_size)) for k in filter_sizes])
        self.dropout = nn.Dropout(0.5)
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=True)
        # placed after the BiGRU so the model can learn which parts of the fused feature matter most
        self.attention = nn.MultiheadAttention(3072, num_heads=6)
        self.fc = nn.Linear(3072, num_labels * num_classes)

    def conv_and_pool(self, x, conv):
        x = conv(x)
        x = F.relu(x).squeeze(3)
        x = F.max_pool1d(x, x.size(2)).squeeze(2)
        return x

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        bert_out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)  # bert_out = [8, 500, 768]
        bert_pool_out = pooled.unsqueeze(0)
        out = bert_out.unsqueeze(1)  # add a channel dimension
        cnn_out = torch.cat([self.conv_and_pool(out, conv) for conv in self.convs], 1)
        cnn_out = self.dropout(cnn_out)  # unclear where dropout works best; needs further study [8, 768]
        cnn_out = cnn_out.unsqueeze(0)  # add a leading dim for concatenation: [1, 8, 768]
        gru_out, gru_h = self.gru(bert_out)  # gru_h = [2, 8, 768]
        split_tensors = torch.split(gru_h, split_size_or_sections=1, dim=0)
        cat_output = torch.cat(split_tensors, dim=2)  # concatenate the forward and backward final hidden states
        # concatenate the BiGRU output, the CNN output, and the BERT CLS output
        cat_cnn_gru = torch.cat([cat_output, cnn_out, bert_pool_out], dim=2)
        att_out, _ = self.attention(cat_cnn_gru, cat_cnn_gru, cat_cnn_gru)  # [1, 8, 3072]
        out = self.fc(att_out)
        out = out.view((-1, num_classes, num_labels))

        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out

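# Worth noting when extending these attention variants: nn.MultiheadAttention defaults to
# batch_first=False, so the [1, 8, 3072] tensor above is read as sequence length 1 with
# batch 8, and each example can only attend to itself:
def _demo_length_one_attention():
    att = nn.MultiheadAttention(3072, num_heads=6)   # expects [L, N, E]
    x = torch.randn(1, 8, 3072)                      # L=1, N=8
    att_out, w = att(x, x, x)
    assert w.shape == (8, 1, 1)                      # every example attends only to itself
    return att_out
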
'''Attention variant with relatively low accuracy (bert-attention-gru)'''
class Bert_GRU_attention(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_GRU_attention, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.attention = nn.MultiheadAttention(embed_dim=768, num_heads=8, batch_first=True)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        _out, attention_weights = self.attention(_out, _out, _out)
        #print(_out.size(), "bert_out")
        out, h_n = self.gru(_out)
        #print(h_n.size(), "hn_out", out.size(), "gru_out")
        out = self.fc(h_n)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out


'''Attention variant (bert-gru-attention); defined under a distinct name so it does not shadow the class above'''
class Bert_GRU_attention2(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_GRU_attention2, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.attention = nn.MultiheadAttention(embed_dim=768, num_heads=8, batch_first=True)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        #_out, attention_weights = self.attention(_out, _out, _out)
        #print(_out.size(), "bert_out")
        out, h_n = self.gru(_out)
        #print(h_n.size(), "hn_out", out.size(), "gru_out")
        out = self.fc(h_n)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out

'''Extract features from BERT's unpooled output with a BiGRU'''
class Bert_BiGRU1(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_BiGRU1, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        #self.gru = nn.GRU(input_size=pad_size * hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)
        #self.attention = nn.MultiheadAttention()

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        #print(_out.size(), "bert_out")  # (10, 350, 768)
        #out = _out.reshape(chunk_size, pad_size * hidden_size)
        out, h_n = self.gru(_out)
        #print(out.size(), "gru_out", h_n.size(), "h_n")
        out = self.fc(h_n)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out


'''Feed BERT's unpooled output into a GRU (BERT encodes, the GRU extracts features), then add a residual connection with the CLS vector'''
class Bert_GRU3(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_GRU3, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        out, h_n = self.gru(_out)
        out = h_n + pooled  # residual connection with the CLS vector
        out = self.fc(out)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        #out = F.softmax(out, dim=1)
        return out

'''An attempt to encode the aspect words into BERT'''
class Bert_GRU4(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_GRU4, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.gru = nn.GRU(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True, bidirectional=False)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _out, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        out, h_n = self.gru(_out)
        out = h_n + pooled  # residual connection with the CLS vector
        out = self.fc(out)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        #out = F.softmax(out, dim=1)
        return out

'''Encode with BERT first, extract features from the encoding with a GRU, then residually connect the BERT encoding and the GRU output'''
class Bert_GRU_Add(nn.Module):
    def __init__(self, input_size, bert_path, hidden_size, n_layers=1, dropout=0):
        super(Bert_GRU_Add, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.bert = BertModel.from_pretrained(bert_path)
        for param in self.bert.parameters():
            param.requires_grad = True
        self.gru = nn.GRU(hidden_size, hidden_size, 1, bidirectional=False)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_variable, mask, hidden=None):
        context = input_variable.T  # the input sentences
        mask = torch.tensor(mask).to("cuda")  # masks the padding; 0 marks padded positions, e.g. [1, 1, 1, 1, 0, 0]
        _, pooled = self.bert(context, attention_mask=mask, output_all_encoded_layers=False)
        out, _ = self.gru(pooled)
        out = out + pooled  # residual connection
        out = self.fc(out)
        out = out.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        out = F.log_softmax(out, dim=1)
        return out

class BiGRU(nn.Module):
    def __init__(self, input_size, hidden_size, num_layers, dropout, bidirectional=True):
        super(BiGRU, self).__init__()
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.bidirectional = bidirectional

        self.forward_gru = nn.GRU(input_size, hidden_size, num_layers, dropout=dropout, bidirectional=bidirectional)
        self.backward_gru = nn.GRU(input_size, hidden_size, num_layers, dropout=dropout, bidirectional=bidirectional)

    def forward(self, input):
        forward_output, _ = self.forward_gru(input)

        # Reverse the input sequence
        reversed_input = torch.flip(input, [0])
        backward_output, _ = self.backward_gru(reversed_input)

        if self.bidirectional:
            # Concatenate forward and backward outputs along the last dimension
            output = torch.cat([forward_output, torch.flip(backward_output, [0])], dim=-1)
        else:
            # Use only the forward output
            output = forward_output

        return output

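# One thing to watch in the hand-rolled BiGRU above: both sub-GRUs are constructed with
# bidirectional=bidirectional (True by default), so the concatenated output is 4*hidden
# wide, twice what a single nn.GRU(bidirectional=True) produces. A quick comparison:
def _compare_bigru_widths():
    H = 32
    x = torch.randn(10, 4, H)                          # [seq, batch, features]
    ref, _ = nn.GRU(H, H, 1, bidirectional=True)(x)    # [10, 4, 2H]
    custom = BiGRU(H, H, num_layers=1, dropout=0)(x)   # [10, 4, 4H]: two bi-GRUs concatenated
    return ref.shape, custom.shape
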
class MyEncoderRNN(nn.Module):
    def __init__(self, input_size, hidden_size, n_layers=1, dropout=0):
        super(MyEncoderRNN, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.embedding = nn.Embedding(input_size, hidden_size)
        # Initialize GRU; the input_size and hidden_size params are both set to 'hidden_size'
        # because our input size is a word embedding with number of features == hidden_size
        self.gru = nn.GRU(hidden_size, hidden_size, n_layers,
                          dropout=(0 if n_layers == 1 else dropout), bidirectional=True)
        self.lstm = nn.LSTM(hidden_size, hidden_size)
        self.rnn = nn.RNN(hidden_size, hidden_size)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_seq, input_lengths, hidden=None):
        # input_seq = [sent len, batch size]
        # Convert word indexes to embeddings
        embedded = self.embedding(input_seq)
        # embedded = [sent len, batch size, hidden size]
        # Pack padded batch of sequences for RNN module
        packed = torch.nn.utils.rnn.pack_padded_sequence(embedded, input_lengths)
        # Forward pass through GRU
        outputs, hidden = self.gru(packed, hidden)

        # Unpack padding
        outputs, _ = torch.nn.utils.rnn.pad_packed_sequence(outputs)
        # Sum bidirectional GRU outputs
        outputs = outputs[:, :, :self.hidden_size] + outputs[:, :, self.hidden_size:]

        outputs, hidden_lstm = self.lstm(outputs)
        outputs, _ = self.rnn(outputs)
        # outputs = [sent len, batch size, hidden size]

        # Extract the outputs for the last timestep of each example
        idx = (input_lengths - 1).view(-1, 1).expand(
            len(input_lengths), outputs.size(2))
        time_dimension = 1 if batch_first else 0
        idx = idx.unsqueeze(time_dimension)
        # Shape: (batch_size, rnn_hidden_dim)
        outputs = outputs.gather(
            time_dimension, Variable(idx)).squeeze(time_dimension)

        # outputs = [batch size, hidden size]
        outputs = self.fc(outputs)
        # outputs = [batch size, num_labels * num_classes]
        outputs = outputs.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        outputs = F.log_softmax(outputs, dim=1)
        # outputs = [batch size, num_classes, num_labels]

        # Return output
        return outputs

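# The gather trick in these encoders picks, for every example, the output at its true last
# timestep (examples in a batch have different lengths because of padding). A minimal
# illustration with the seq-first layout used above:
def _demo_gather_last_timestep():
    outputs = torch.randn(2, 3, 4)                    # [sent len, batch, hidden]
    lengths = torch.tensor([2, 1, 2])
    idx = (lengths - 1).view(-1, 1).expand(len(lengths), outputs.size(2))
    idx = idx.unsqueeze(0)                            # time dimension is 0 when batch_first is False
    last = outputs.gather(0, idx).squeeze(0)          # [batch, hidden]
    assert torch.equal(last[1], outputs[0, 1])        # example 1 has length 1 -> timestep 0
    return last
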
class AttentionEncoderRNN(nn.Module):
    def __init__(self, input_size, hidden_size, n_layers=1, dropout=0):
        super(AttentionEncoderRNN, self).__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        self.embedding = nn.Embedding(input_size, hidden_size)
        # Initialize GRU; the input_size and hidden_size params are both set to 'hidden_size'
        # because our input size is a word embedding with number of features == hidden_size
        self.gru = nn.GRU(hidden_size, hidden_size, n_layers,
                          dropout=(0 if n_layers == 1 else dropout), bidirectional=True)
        self.att = nn.MultiheadAttention(hidden_size, 8, batch_first=True)
        self.fc = nn.Linear(hidden_size, num_labels * num_classes)

    def forward(self, input_seq, input_lengths, hidden=None):
        # input_seq = [sent len, batch size]
        # Convert word indexes to embeddings
        embedded = self.embedding(input_seq)

        # embedded = [sent len, batch size, hidden size]
        # Pack the padded batch, then pad it back before self-attention
        packed = torch.nn.utils.rnn.pack_padded_sequence(embedded, input_lengths)
        outputs, _ = torch.nn.utils.rnn.pad_packed_sequence(packed)
        att, _ = self.att(outputs, outputs, outputs)
        # Forward pass through GRU
        outputs, hidden = self.gru(att, hidden)
        # Sum bidirectional GRU outputs
        outputs = outputs[:, :, :self.hidden_size] + outputs[:, :, self.hidden_size:]
        # outputs = [sent len, batch size, hidden size]

        # Extract the outputs for the last timestep of each example
        idx = (input_lengths - 1).view(-1, 1).expand(
            len(input_lengths), outputs.size(2))
        time_dimension = 1 if batch_first else 0
        idx = idx.unsqueeze(time_dimension)
        # Shape: (batch_size, rnn_hidden_dim)
        outputs = outputs.gather(
            time_dimension, Variable(idx)).squeeze(time_dimension)

        # outputs = [batch size, hidden size]
        outputs = self.fc(outputs)
        # outputs = [batch size, num_labels * num_classes]
        outputs = outputs.view((-1, num_classes, num_labels))
        # outputs = [batch size, num_classes, num_labels]
        outputs = F.log_softmax(outputs, dim=1)
        # outputs = [batch size, num_classes, num_labels]

        # Return output
        return outputs